[Binary archive contents omitted. The original data is a tar archive of Zuul CI output containing:
  var/home/core/zuul-output/
  var/home/core/zuul-output/logs/
  var/home/core/zuul-output/logs/kubelet.log.gz  (gzip-compressed kubelet.log)
The gzip payload is compressed binary and is not recoverable as text.]
}_g^Gcn=>5dy9Rol΄,kl';5cgF{neϵ-u@RLo<+/3U/4SElk@¾/!F7F6QbZ=ᜡB8l"2L[%Ln=Ahc"Q'!GB F%R 69k)rTv6v&ΚQK!{fenՈϯ#G:z)wg2A'sxH2'+hd!qe+AtD$HgA-GHei͉t"8jg6)i[ygct+ɾ&ѰJڍݷ|3{s*Wnumjjrj/Z/+3_L a$1-"b V:+I D)D9ǔ<)2JRY   DHN[!v"MR]#cgF|tb u­sZܰL.g`szv1 ,_/nid;G|:`%ɍp3;jڈuJRJ48RaE1QXB#,qx`l9GͰI5A%6ÉL>&ptקow%jvA 1w;ӎ]AmSPxKL5i*f8 {EިNe0XɀT;-L8DўEX"F0 .XCPag&-Df`DL?6DĖ~3DܔΣ ⫉:Q`Ē|F|M"TFp\h4X*qd>Iq,g\p^($E=DҔ+f:;FY#,BW;zKsiɦH:ERp2a !Z)Ձht¸Y$E &HiL'biǦx: a+Ir6p/~hFd=W_&QcI' Q!q8Q֘]!))) 9^sR{K0ӵvU|:\?́H}U( 'AP@)+Fo|}ÅϢ?G5H|w@񨟞Rꢽ]hFyMPx@[#EဌyPnœ0;iμF)k\W4v+{nԔ9ɕI1HdlH(S<\mFErY`!&J/\VAJIkg:}Z6&zR߬L?|RL7\8B%06&Df!n]IW1@B55 F&rD_W'pQ&pn-Y/~S]!q>La1Nysj͛%4EKfqeW۸vGܙ-_ |ީ슪Qx[rhПIlvWk=}>ރgFPy\MK/sPr's[J"(9_XeMͷWW(Q\07$zk5t~;V`pp?N:΁*՜oCģe3l|?g%&r+ 3[Bpcl'3KzAgI+x q!vOA iR]Zl[މ] 5S<4,#MDѹu{$$R1e&iN9u,Hə*.Bej1Ne"9crf耥ڄMDZ{gh4I~K|10;悝2 bEl'beK[R춭8ǶfɪSdjK }t>]D|즱u{:u76a<:\,NepS"[dzߞnw6O1H, ^`IF-<8.0R,jfUs4`d HeD1;.c @B}~7rvK8q1sysrf9mS,Mnb,ɲtj6)o_ ljOJj#PG.T.rpk}^Vg\+Ȋ+xCƒ9N *RpQ{RA:GЎ'G2U>A>L@4;&@Rq"g7^{ ;j/L}ĸ`ۺ{tu%ntL޵,}{$|x=2N ! ;xtq( UWiu1i殖_wu]Uʪk[! A.\uf{85;GS\˧JS85 =b5ST *W+R ֯jiFѳWA~96|L& t7G߿rsW$fv5 X^Z~Fҙ.ZPZ02T@3偍|Mb?ObDg٧lvJ]ɫx1ÇA*ծèk6.H. +[HRf Af8^?jA]hvB#F ҽ$- .w_|-VmtfF7]jYq{]Zv@Vȳb6/1zj{1 EU&WQ[nHkǩ_WčէZjʿ? =Q)߻yMIM^r yB5d_|ֵ`=&;>i4=YOu&[`?bb,E3œRɷFTrX +RM1x'`>q},{\¸;>"ATLowkDq[9$ qewg tZמ,q95Ӓ KJo C%)˳fd #XrmP9.8 R!ѣ7/#'Җ3>ұ^Z B,'D@#%!+H;o_Ʃ0c>)CZ5/AbAld<)&l"a5ZJkε`dfB);{yсd맾`k<;x; oWdzo,d; gBZ2) iΧc\&]RP;ʠOsӁU0_G}{/ u}X..:,n[v{{&wgJ2HrYS'IJ=8fuVA5dxd RiKt=RWxSiC%PH0zλcfI@Ck8)0@sYx),54t_@ڌ.於P|8+?l% n ><c;Ĝ^N5f.O6SAփ5HZ):HZdAQ^zJZ`J3&у QҁV0Kb 0 oDP3\uo 9fRzz%Q ,ѭm_<0:"sƾfˌLwVrՙGϾlݾB#fډhYP UN zik[ VQ4BË:(G1Ĉ>z,z +7IyT'2k(q]vVAsѰ^*K))%3r|V M)AEn# H^덜=RILG>[κL.thӓR]njTb{5Eڻ)u-\vgrYS2Q W<~^!l#c6):QLaTӋ)p=-/Bq=>d 9f #1I2hJ b\I$IF22"TEnޤ 7º -:dv_p(`hQ_Fq7TW^Cn z^{/^ѳ!FrsYjPJ,(]8Edf0$'jpCcbYRD-,p+=OnSAjy6.(L﫟s}KO'% fxUzX:-N/k -ڤwtf*s[%?"&y:;?.H̋{QOVN6lkM#.7o Ϸֺ 0.YuĘNb[bN$>3"'MwfnKU2j7վٺ}c]̽VՊދ%Vn53uy;gFއjz+tխ^uw lW[h&Wk[{=տ]:ve~Mhv){ѱaΑ{l%縨 m]mR_u]kwx׾Rko9eTb7 ]LIUcspB'Hd3ŏG%\6q!aqӿVh+.U}* EͤZ!uH&mL@pR)>>~Cd4^}Ǝg׉y\M`|Aע ;ųӋԆJSm}f*^NNpILhLO$#_< ڑ~kV9NDL8֫Msܸ# CnsWA}rutY%,#c W35w^ 1.S 5P-ޏ:ӛ_tp|tVZz7.0ݫrK/_5ỉ⿿zK\fi".qa9=39yy(6c\tox3Ozgݑ\qYG4KɊ?}dڕuSw EJZݵS|Ge+8.[wrj[II!ndvpw~Y ŷGHra%Yv˔܎dlfbbSJ% Z-԰븣3jۼE1.ut^;hG Uhy4q9?;o$Y+|Y״ 2e#M{}^a7Wo`У-OY5N^%^QYK_[H7*ڇ,P+ЪDG oqJ#A&z( 5;Ŋ-HϿ*tH*ބsby Yh3J: >J(jE^JT`0J i(|J==CIh ZM+[Wצ)cΓiAW\J\d#eGH4 SKTm8{z?>%\ý.u.Tey@0X&II4JH V!3ʨE1+csb㹡!$5/?ń"𐔧Mb9ō[\Q gF\1N0lA(ƾhjqF\P%WӈR/`zML$4 LmD[#EPN| "012gǍ"jDp1ca㤕䐍2r LzGJ&ner!J\Cj٭>$ZjOzq'_g5)9T/zzqЋ[-gt̋]OVD.JL2cv<[+nQ˝,^ QJ+՞{Swam^ApC$:ykG?޻(oO אHX#f2ҁ׌$0:[k$G%S#>W歲y[:c6-Ɔ0&#>^+9[<Rrˀy6eq].' 
H*V;wpv{o`_XTeiݳ_f~Le6MDn>ZvG#ɽ$$%l"m3yCFV31:]ގuV1qٜʺuUa*Lb,[{b x}s~Uf 9༗"L(%捑1` ?z˔D`Q,5aXC$\{.ALJR{@ {,oE ̓@ђWbw?hmUaݘE #Dn|rxr:YŀhwY˵ Yq> `&$#mPBQH{j'UFqN-K(c1;A{nc蝯]֣p7XV>o ;6Npwb5 "x38a$AoHQVMpNBIX:YJճ#^98$fm6QNbV]j |[|R~]}HќڕL-U_$zEj{U4-gs_o}52>~%PkB֎[֣t0TϤJ1{|ż۬s4mX׈& Udƣ 7yʞdOe'2YHɐ]/Eو2RHY+%^'Mxy Lǜ2u niͅ#EFBVܔ(gt,p9dMWm8Lw7񕶾x3#ɼ#QxjF]O?cCp0}ZႭR$l6^i&`c>9*;q UY_&pT":ݩLArt`=o\9 YC :v8~ϗA™BgnHz^zbhK ~r[9cr}GP\tKmTQpapNELߖ\hKuiͽ4*9sng[ގ|1 fxB H2l$8SsbMxC[$5Le+F4$e4dQ'g_%p8>Dx[.,ANp XMbD2lRR:`(R uPsgKFP D(o$3J'JԡRS 0F't)*\CΠV {=ד͞B:8F;4QOy\eTQ+M *}/8:S]^1w`i$΅40t"X|TV+&b%}:+P4!{,آ!pGg/w|Fi>X&edQTUAn=oFΝM*'԰(U6\%cЬL6R( 5qN)DtKJg }%M$F%C 5cN.PqNl7S& @'1oAu[C ߣM\vh˵C$EcBEHHWَSr"/A^dU/b@@uLe6"2QXD 7 ;-8JQ%9#M:BEA&10Axy}ԖhI]t]MgLw!H))(S1Fjy&#O*"Y(P+WGȭQF9PQɶԄfmM7_-{[[n{Rnwb}9-;r1/kMiynw6+i55#orѿ̊3^qgb&T7;->qqEN>qD!$MB8* |ǿ/Uu&p:㼡UW5x;`_,A1.ԛ?_屔h(0|o#??QKwfK,* o1^Wᵣ8kNۉѰoI%'Hb9-+WBDJι/.k] ,}!ȾyLv}[;s8<4ˇmox"+a$tZ`T>DD\#ܺvH}{ceG(E!'_7/uH/D B,~Rg@a@h TRbK/->'˂w ٣n^9`Jk pZ)C]j,mSR$*ɭ5n]{طfTRȟG8-e~rP0wlx{O)DΡ^M GtC$ե- \H>tt={ ZW>>VL8mk|DiD TG@Z'f95o-劄~u2~nO O˜t[FPVQ_jM5UC'L }jT.SU{tu6K)xPURDJ]%`؃lCN~ondײ RtAB =YbXrK\p oAɃ*fe8I@.e eV1'8G1%b;םMgw?_W sz;oPWl48AsO{}m\۪gϮtW)Ab0HI.(P"X6bJjbkKG/=|/=Xo%X![ނ‚6JP]CbnEFj9qWS@H\D"CYG6j͙DLIǐ<՞J1p.tml:;80$tx7ǎrw-_S>#[6lxUV;DpC :qKeƊ$r"PF'=/8o[x;tv&.S=A Fi<9>~J(hٽ$<*ȶ-!LM|KoT! 0g-OFNd>Q>J8p=V|~&p>rön8bq4]7#Cs~b[hN3w5;LZx^ƠA [{|ج.ffYo$ީﶍmV,)Xʵю޶mq~jF2x_ 7*"U {F3q¬tUyh4ϖUɿ@1TErS畢XV9@}Kj|E7u׀hqa)^RS %\iSH.@ 9cI+ӆ󿿼Of_?4_vi^g0ݫ|r_Re~B__ݯ.Wqqx+z5P?$W<ޛ b2jzsj𑧄+W'oYvƆyΰM܍y3dbUr:t[j.M;Zїy]{fBOݨ0TAg]3D @q2HYEj+#i 2 NN!ڑWD]߁yph0{g&ʸ^'邋Hw R1k,N;hߍY9š=6{aEz/n)J9}"'J-17uOoy1Ƴ=_38xR}?5_ij[=>pTf>e¥_&Lz #~z^NaUﻟa5qg;M0[%oilC7R J ʏW~;m4aM)re.^z,ZR}BIa'tbXetDbrTiMkc$xHRrPE1RnT}:&!٫J2V:gY%S\:0!:tN->YT `5C.]EԨB=/;H)~שE۶Yo-Ofoe`cTZ"֬!WS84˪BzG5>z``P YHe\Eixo6MWqw{\*.޻)2EgԔs^E^=2uso4|a~+7dhZ)PySQ y^$.z499.~D_.\O DWHy;Kl1YPcY[_j Vz UR8K%)# Hvc*3*(Hv~4b˦P߰iH ;咢S >u>enZbh (nvܢ8|WەLXKj; t3n>hD\yEc~74ux򟪱}S$Mun]h*֍S(p]4Jw^fã:yE„*5G.pIe4Wy0c$\Zy\J{\] Bp-W W P-]d%JSIݫ;.k./I3ER C&)ib˽#KNᄏH{Gix3|cRKh%ھ@Pyٮ뫝hmpd8ٴUevӣnobWC V=iY{5'{b6I:3w}J2*lD "r%jٮjRKt46Cյ]zjm#KPIIq%HhL05:\ZN;GL@\Yp^<ThprWԵx\JFiz:$J@pv\5ΞUC漸jVQq\5S:6+6quqO̊h$XP P. 
Ju\Jz\] ҜL(KBTq{7jPn žoVIQPRTM}:]pcPn7 Rn$X:M +hV2A:j5nl.Brx| p͛|Zz5^)Zl\x~eXY[ +7o[#Z{uߔOm7GYTDVDɡsBhVk`3ZFr\3cJ<`Al4BLǂ+T}\J{\] Zp W(G3jO={DQzpkC!qH)#@*Hh"9Pkh#9T%FrYmDBgH$Q-]JrNuLǓ'G* Zu\JN<%Jt8{K{ZvpFEtN7 E"TWχGՅ]wP.jVvUC,/U2P'ory<0Txq*yY^"E5 &\!bUBfD\Y!(i~; YG ՚ϊ(TWWŐ-;pg#짽4{U3L`3c=5} W(hprE4BtW\1Ϋ wl  1RTS-BҽyDFGiU,F^%nc0-ՙDRXM*~q஋gկ\J&eD'~1OKY,'gB4y?I5DrYjYu[ҏzEFK C{t~ 7woa{Nཀ]Ww~.~%]א|[e6ywwⓛ/\ hpIʙK%L߂?; f>o`뭷_^*tn{4Wz,opC7K O^o&Txg< A;~q*v?~X܄"VJofP}~S'L\sic ͹T>JXr)LM3ssWGhr>creH|eV Ik1FT{CN<a9BLI\NRS*Ege쾘Ji,mv)eJ&bƾTG˲:CT|C65 ӊL}џWV?J>Vժ?έ>lsR+#|`jSq[׳)wp.|zO)_^?&,,ۼǏ,>YltȌb9 ~|=^t ɵDCk*wC*I* @Ex\Z+N]r"όU,W}m]j .4ő׻}X`چgm@wo!Mc0@"xM&: Gwt/ }8]6 Hna/`!БՂ_#KJxdYԘq2 >IbGՏ *ҡ#(:V[pwEIAmo\s1:7H 6:*ku,#UVP*Tl?Ru#UF ~:fs^Yep Ά/nl!c +d'Ĥ&R,F>#bmbz3idNoZv-E\ӹb:ˬUsNl:f9D4Lʔ8N[nV^Qa}Qd 16 SAxm279幧.OT*2rۚ"y q~8֢Dn{ lHRnU^U-xoܾ:@9LƸ )3x* r,zYj3inȝd\(P 2bUJfA%ſXF Rh$q YJjMLrCmM 50*Y@I:J柏l$ԩ\r$|CEpxX+)]JqAՎl>4SZ|Ph}`+lP'WEp{w #rSDJzn${͹Q%2Ԧ;:EˍAGLQ iuAJFet})c|)d "ystIqG.HM+C0!NYTyf%q<=RYKR-;/ x {3Ӕq)*5^cs&lVY2-59;M}% []{O`93{!rD9.,8fqY+%hHS iTeT@  df2#0 >pϥ'A;2[<~3o"V?$"'q}=Enٻ6r$41ŗ;>me+d_Ւ%-eʖ= fWU_@^5M-511W&XCZST"&WT#Ms@8 $(*@=*hI3P* >ֈ|wqy 1u}"-8CcчŸc_} =^a5:_f|t7M_xs%; 7zDp#~OE np~%gm#%1?G؆/xcfɽm;?케$OM5M7UkR֓!Pn [uuDn{؁ ֈ 4*Ɖ*c %J8AY#&s*ATE%*Whh>FVG#ը2\.itƄܢP%OJ#q ~g9Mqw}߲AM0|˒M>T\R϶$P,Xt"MGԙ۠ŽCf둨nGz,8V?>,J={Ky*x;Tt[L죶~ڡwg69๵Kk*{٠-/u؋-}`o'i>)kh`s$ShSE &%0eak"-`AY*3h|&y|txSIM3]v0a^ɄA@Aq(`xxDWk.I rj$1( &o3~#:RGqcN聠%;I0ֈ tŸ T'=@K) gqC9@Wqԧ{_Wg2H*ܿrmwD$m>qlBν^G$1ރgjl]cszzw8#A#{4e-W5|<=zhUwϥᄍ϶G:^l=Fƽ;y5WjQҮvub8~|rlW8'`UTEym8.qVx"J8%,x#k5W;XM'`?vz)k2Q[E n,*I :~. P6[" J;e)&B2΅HG _TR$2;ψO9TB1r7Jkw`v~șW= 8>sk3Kr/Wja˽Ep>}@ͲʅOQz̍ iA@}m5 D-8^'KmJ|> sD('ltZ\9v@~$|܁fkӊAReޓe&PgZh.P፧:Cq,<Ѩ4~Y"3R/3j6g{}g 0|I6bء+CwmW&+UE-FBzYr>lO+gMP>&>vC7r%rx9|[9TDd'\Yb $8!@r‡\2hbIJsTڲ)rK>H+$䤎Gv)XC)r9;vth1rz@zDpc7 2JHyDZm&tk\So(zΆW'VDGeY/(/y+PIUHhNp"d2>r|ʨV)ɚArIn}?H g'42t" (F x !s8D( 9'ό/jI\1dq1R ٹRsc¥9U<tJK"'rVr/G$f_n<=Ũx%d8O9F:#s'62TA)aXNAJ{VQӯD(Rr%mRR "T)HR1@bP/ƃ4Ik=R $je 4-uQ 9-V2*ܠ8(y\\_KjbWhN8#x9}}gT=\&^p|?Q UzQ)6墡LT.zԢ5+9kO6N,~:w& ^(5zx! 
mn;t]x߭R/5ftdj:~X^jji{sS9fDV2z6/)eP/ _?VȍW\Yפ^ͦ7gU?1zB-|-"(6VZDlxNDxV#ZҢW 'Dh>P@I@#WHQ"žZS 1M]T' G -%"n>-FN8#g%,!/D/L#owGk 5^W o}tqW2fp(Y~ pu"Nxq dE]Xd5pjZ3OHzH* K^$*H_CB.\ԵOMzh{q6z;;qhUvUqR-]^oQoq=ߎmr1N:63|㳧eH?Cq ]5Cm/',nK^R%|cG4B1 N5R[$gBT"E:RH5xbj'A%rEyTRLXo]lsJJM$b*khe:%* *7F+€:9 ݭ5i)~^1Cu[öz{%Lj^̗6qD k: j0hD.$J %lgC.6%H&4Т\VI3bjRl-5tC!OtǹJ!fQSH]3G7g \4Z0"kךCU5aU9ݓ7WΦqjoj PA kBY pCSF=xVyrQfQzz%6XX۽yu1:AsYپ`ӷRC(e <T˘TIhdv%4sXК$Y/spŰ3TDg>xNA#D#d^>|b(q .ΛLnrUi4א#7##|KIRT1PKHc@:2A) 'gRXrrT@ dZ_z9=b\pǤi|椻jқ9"y#i,wquxq7Ehl5 >gQx/x3rl>1Yn~|ن`6@I u?(2g:[AtA7͓#[Ƽ D3)y2\~`|8N^YsyI M$(\M.ƃ' %}nC?J #Ie7r ?Bx6z8<5\s5n!I,INK&EpoXd;zMsDfj|$)JxM8'X98L$%m(t ECwF.Oi\wd5ibڸ 3}^]en?h5/dUM/}˻:r6XN.N,ĦvyrCOP'I.l~^)gd3=_3M6l(pƙ|eM#u.t8'XϦm#;!|M̹هID-4Vl|~frzc]~o{{}=/՟1t}EWZ|v׭A z_Ycy{ yT>T3݋l7^}wMlWnX6O !ҷk[{5]jxr]n5nOɠ&x~4:vD1.e]#U:KIٸydh!]jnWڶcm_][R1]@BbOw 8dܫ2;9l@6Kx45K)dNjf| ~HOqaO]ՄXC$AE(":E`5NW~h9v\W?fpI̗h-⸓xzWrh j38N켶7T֍}:@8|v|un.?[=M<֫M@TurW獡X7㹑eoh:3.qgj{8_Ew``UE&5߯jfHQ(H1'$q3twUՄzIM)4Hɕ&;U*"\A>B4~n o?xi#&75iWd> ӟ^p_ZnOnzOD)V^\,hǟߏ6OQgpMS/E{R{_4iɐxɞ]轋5g`eVR1aixoBY\uuAD˛/:x2OWӅ\LGpjS!u4PSa^&~aP&nѢbbf#0rzy"Rۼzֲn}% \]M-<;0^?ԅϓ.`4{5= ^ZCS"8vj+oW"f8wTj|b+u2RQ \iWR @qH41vr nc{$;ai?Y.{jpsGlJ))'s枑R[Û]voVQ3~BRz*BKv3 e£znwƫf Ճꪧӯ@(ުtFnvS[uO`.4|Q+Ó_Ou\T/IY Gv[>G:t/šP>mђndjMc'2iLr`)ău.%/Ĕ,8e[c3K XD.44 3xxoR11i5 xh,T-ۤб\iLQ8<"_+{ iԗ::+>oq8)H˻ެ^A5ަq1rhRZ>qR7FXo~>|Y:~Ji^WWŢNF8?6k.k۬mwx@[y1܏c(bO!1D1Dw11<@P#]!`zCWҾq,DI8!ҕDS#BJn+DkI B:@2^7tp ]!ڧ6]OWҲV67tp ]YB Q21WCWjîW'xj;v\KKW[OiPڎŮtڵ12=+L ]!\BWVڮlC+.ZY}nKڦZu3>;-^,ܨm5͚Sş Wӫb:E#9tNs,)t-=%pw^oJ輪(ĽqU:^Q/^Rc>qbsZy.e˳-5PƀWoirB*%AyGYfqM_Ax}EQRKϣ/`9:❏zwDF{d7 `%{c7 \-b7 ZyPbp8v֢Gt 폛p ]!ZNWRҁ$L1+kU_ rʻNW !ҕT>Ìw{t3(7th6]+@)W&>:TJ'B?tpc]Z} Q25ҕ1#B?+Io ruBtute]!`{CWW<]t7z}B\흮PVpzReלA]遮vz Gt+zCWVۮd+p=+l ]!\&BWVЮ|+aWati{IDSJsb!uK4l`^L ;&ikqe4 8[Z=8cMnGrIhȥĖ^z[f&ỵWhwq\cDVaX+"=,%F-iD4RpK(1dpQ:i;%ݹO@N}6p!/~Q23xr iqB7th:]!J5L"]IAVݧ+,'G7qrD:?(0wt'0%Ypo;J!ʕ5]])k)8oadB!C*Я?p9LO]R k} dQ \eٯGş~L߃>Q/6Uz7e^ݷM^m# a%4/ hWxtY7_.vZL0/ы;8jl4H1Ub++ R*ťJ>rTIKKZ=o[+~J>j<)^OG QJWNlZ@ʪfexϒ(nQ(-6kz 3x]3NJ?_,\QvKҴFюϲWfI)SyX'puoL5Dk:Y"J;T3Bfp}+# t(iƃ++ Gt]!\ JJNW遮2v9`iƭJUۡ{PꎥA6[Еj׮RhmzDWpBJ5]+@iq8AZPSIǔrf7kwnQoʺNe6ëj`Wv}Vx86c=Pfb7 Z-n7 J#@Deq(Bt(6E?ճЕP)#/th%:]!Je:@JJ{DWX8Wt ]>YWXµ/th]+D)g Jյ=+luJsJzB_Q(D2gˋAJLQ9.>ϫ=WoYq>)., j着-p&^.wh~l7,7ԏFI~wu*|]ʺ|ݗ}- 7R[M-kS yy{Cw;VTTr3PE k7{fY ݤnܡm=79R>Й ;tb*ʑcKwKݮ M2/&|/NH :&թ1a s&MNTy%2@1\㛍z5o 1Nd#}W'ߺ9j~ubɴc+b@YE+^WYNr+/# *-#FiTlvz^[$NBR8'.B,s(ȚD®?f]cS8TRRrwc&TUQI Yj ,Z@{+S) l-Za V6\J  )E%cΰTDT#Z)ϑJgj-ZhZíb[A7c0RRW:Le2܃%%V&*\"$[ : A]LCЍ1EA2( m}`9G"@9xQ+H4aV[^؛*ӨA#Pw!ƄN.ib0Ǩٻ8rW i#6` 3_vXvni,簺X]R[ y<, @02Ut4TZ Ql>Β@+jG/o0VE4_ 黺LT?d|H$Y'mX /@ɘu!gU>(d| GI/jnƩ(歕9QJ(ɂM seNJH ~ w:b*ÔxHQچM(%|'TIFK)$8:BOhk:C4fmDiivJcTCJ0@Dƃ_xH$apZJU6z\.$DQ̨'餞4/&S64J4)*li_"IMI1QIeXeM!(dGoOUoTdeCގ@j#_f| F *1c5. 
[݊NVp Nk!N8v uqqOL$k`yU+)qh4y֒q, jBGGyX@Y'EθдPmߦ9u] %S0Uಮ`0kvF#)Mٰ/A hE / ٻZLt&ƌu1)mh'l -Z wUTiXZi!XfT @68#  sg4`< A2*dMg0M(e:.|-ƲΪɨRȠe&e1͠j`oB\?W0c)(hPNiP,`6{ilFf3ƒ$3|TFݚ i"Z8J(46x($1^{4 {xGZQB>84iȠ }n@}rnoe+f("f"cD $% !`BDEfϰā9->9O]?/jKVд+G{ u mZ@-D: ^xu:p8olK6!KLAG=(%WZHFW4wSPd֣2:/3(j Tx$ E&rZLE2^3]701%$ AɣJA&حHې 8tLt#+QQSP5WwVT ^{@#BK|v)}ю?b:EjC$*):]%_>&#T˺"(۽XRr,a3-E*Z5%!:w,p 䁀:t kT(%_2&$˭fVE̓A;J"}P`joX 0%V$kJ7VP8i" (F6 %zʤO38%0h*夵BS`*q˾ %&.I d~qXoCnRM%e0\b7_ 1 `Ve"$Ct.Xz꤀+-0 YX 弭Op]g슆3%8D'%@/ _j-n^ڰA4s+saPj?t .Qk}h.kՊlGa~ť^`s_K^vbn&:;9x 岯lH//v}Z/sqW:n?l/|ԯo}YlJMv~zhwAd6a>lfcaV^O*2\}u`Ի^Y|Zf/fWxAW/ٺzq5ּ>_ͯ=V âlū*kCk7[K=8E̱ 5jNK[M{I6t-JMQUEuh$K [-e_*tep/ ˡ~زCv.0.<}d~-P}&#]-6%ohެs{Q=Sjի-.&؜uݗ]` Lr$7|xu[O]o褖>oo%Ζo2?mp>]bǴ>'ok}A5pڅt1_2ƪB &Vw@˒4f&bڥgv2\^#W\{G6EF;03!4v}[mv 2|".\vj yu`驿>_/?{_i{ry7Ӿ|yg8ׂ˜#}ʴFz 59!g 6)=ï?tT9V7-_I\}.wx8\^9;7|F<ЃYߑ> <<J79ż;v&1J)A: T0|d?9fb{2;[ i~ dz\ϕ[W:&[Ϸ '$Zc$٬2j V Ee^ILM\ˤz̹\^t? I;\y yLG`n>opCGguoExdD\-PәmI/TdS)]kq>'P\4>ݞQaސol()Eo>u5զhѹue+`gt=Ѳ/' 89>S6E vB`LGQZ)Edɢj`~$X?#1{_1qd.Dnqbo9_{O`Yӻa[2?Ƈz~Jq8;áX-6'ù3 mЗjob}߽7؟~bip򪏱r}ʡ> _83W 2ڼ}6lΞzho ӪU--C <SJ?m:F7|rUͅunT\vxlP}{3a:/"oӏCj}>uc+Z?{F!wB4cfn.|ښ(W-1WldewdlbɪbEk%ŽMnC54y;uEzxu]%#C:@X((xv_͵X5jNozx5Ƨϗw~Z4lL=aZS;\]7`y3|ڬ~g8936 % +rJ'++i\v@>yBTj3O&˖/o&Y}?Lśd6?ȷ/ N Eoy?nnơ]㑆/3 /ͺ 4kWm) W*N`D\mF&2k||499U(^z{뇃n|#z@[_ql׭OS[_OJ2 h#ƶW[8ÜyI}1"k[fny< p ]##-7Qr5RU.fL Y5LU6(^)ЧۛVXx1X3Mૡi 1S ꅝU_q=Cړg~֬X݌8V̖s_]pu>"-Fx*'e|J>OT5nGUZTQ3vR>ʘs$W‘A`I )&N(Pb@)aVyS93L5gRu`Kεm}܋w 8u7n3aWZ(]1<#85 5*R8*N*0O ő"ˍ;BqeLGS[,G˄J Mυm;j"x)rF;"1oDY"G)jcDFpi㹳:yeL 2+4TyapKi!Upb#"j&E-c ݧ̛-7{^oY-n:.s}*@9ޫShDLS jA&nZ Kso^-ddJ H+*'mF+JrDQ܏ ;1}8Ϋ6t3fC_67I#p'J8uT(#g X4 WHiDOB﷦ښaJ*9} rFPUKfѺ iRDdM1Wq dz::L bI{A3*rQHY'JApoO6lL T3S3p\go.:GF DAh., ⭥!'P)߃2ShHOJ"zRyv g>qϽg>3*ywcW[:{V%H%VFP@EK唁'>N{&AL'J$g`-WQx6Eڙ(*} [N' sUqP\aVԚq:($Xhap+} xu3 ړwn@:BuI $ h%&pc,K99`O\0R%HNG =tSP4+CԀwZ Ѥ]( @"FT(#Ǩg4*-XT'񨊎9Tv6 /2&bIt)Gy4_u{1.re"Q)"S$OpM ORyC<6>;OwUVkp17>99M[6lT_:ӐW ~ȃm"s^4?_r/rRɿۄS1KOO88߽ARNi5O]=e; $#OQ7|lkYZ~_ie/.a /4uBޤQ.".id#ġmuc)_I^N"([s܆J<|us7w~y3X,হ1N\^?`u;[${l{]&|r8r7A9ۋ?_zQ--~zC^qGzְ5YsO"NreUYRm q fߏ(},=JFBZ~똊{@׹+F) 1PK8ٯ "OZ#U!2 Q[aM>cuKPnGR\&Ye qr0X6Nr#ʶƓ-yR&ǥҁo{ƢQ~#eC?GiYK9_-pF m 25gӴa(.TR5G+-ɣW]wH"ȵ4{b*'yΜ׺)!a} WWt-%\V()5WqDL4y:%*x!T 0F+H׵Z)Pھ'=AlY>-]Qi[U{UT=L볩zkj^LB ]hrl%)a ?E#T'ix+>G.܁O~8I03Yچh]N)r B c)@DTtSI2q:V&(fom)mxS'ٻ?VQD4&" N@I K")%Zp6E3om..?}`lɺrn~^ŮY7=y&|3e z䀐o,j_~Xy]n80q.-}W8b0mKP ((,>.dR") mol ;j*9Ρ}]3BD$I$Q7QGO4yTJD<4\=` 3س,M@uHr'yPm&r# ,]Xb~&1"QGWvLOmP94ӬD&[@MzY@ pNB$iA9"d|0刢=D&D%L{#(`]H9g,ꨬ-, g[WyPw8S(ozg z.ڼUd\]yf>p{t*fEqlUtk,`Pgr{s$s=yYj꬯nOϛ\>_<_J<)9hϙ"jMtL&5KY"ՁHOG~4y7ՏKz@ k--m kǩ%8piJFrAv)!$2b A[\ . 
xU%x$ɹt~g .0a0ᨉT#MAmM>-Wc\N|;/?$4d2q8cɌl''zI}ۢBH.yHfӛQ:Z_/_F1yqg)oFj/Š~7ZHUl]sO/LPz"8P%xfVDø'-2~ϼ+{HV~e1J|HviΑ=wVMX+ T\,POĠq[5`n&OP`sde"{nA脟yv0)GKeq}0w`.?L ]3ۮmvO.|x<shĿMf[st@cuϭyt=.6Snl6b [.v>Er7=L'-Nn|·Aokvּy/R:BK\-}jSLEnտ)Ǥg2^[y+Չ_ݻpSɮKTKV⁦='@ YR֓>عvf Ndٻ޶#W ?mqߪk?mfA}Y`1[ȒV[MJJ)ٔHaȂx:6jE5IvlXٱ/&z𒤲隻ﭻStt P..Oo;pTuf9Wi+BI$Wҍ!Xelרmz&(zvw^Azhkd2sE*:Ke^uY~l7l%'_j>, &*\ /Q 6ogyq,D^/y?&0 s1~h /nzn"W&ݤX+;+M9f&{ܥ1O~H&{X/Q=f7V_w;+m4y^#GANfW?')ܡZCʐ$Ltq.]6\5M,\nsgݧGYw;Ϻ6e3I@nK2-]D/%ir!)+{,o6VǠ>3NV.!JAKYY(U&Xa~&PcI f52N>Tk+GH]rmQW L7q6#>ढN1_NX$_=ɓĔ">yrkW+7Cy|ZPMN\ 5\їY7:\-NmD HrYXwP$BR.*GW@n W( <'YΆrBQqhj6 ;C&4se}mSCOժHugC;/dfZm?:EaK@J^2F糑*Y}(0=@F\jArPrАB&_8 K98D& C1[LmPjpj@+A|u:'aO 9U mG4_I[@3/qh V>: i8.cV>_ |7ؒq^FUDtP CǠ`4ULD(˿"UYEy kW>_Vudxn*un;70H:;ƊUW(T<74>Z;E:$3̣y܁鐆CuӐm#t-A?Z`t52O#8!&7˥+{O@u6$fj61ۇrG Br)OIFD1 $)y.@oF#=7f,5Al<ԏ (Ek˼R1Vyh$iUsUExԪumZgR h>am(9m9B%̒ߒ@,įV vv|*fjPh -#3O٘7Zg%~ eS}Eo|go/u^0͛ oQe_)_N;O+S)RQjRKaZJz!H)lr_NϠ2ɴ-O.4̤XO>h]>W ʄXeB! oKBٛB6$6]bpۀ fHH WVkhc-,rԢ SnQuBlRB1Qه;WtzDm+/iO3̋ Hהfr?d *,=y!}Di"phxӚ oZCMkR" ޴o7͑v]Ѹ&9&;xwդ37:N?nŤ(([R~?. tM_-.Q9pٻFɕ`]p޽lXSnמ`w;\>Z|4EgOo?ۿK\U>ᄒ[;[q"߿E]0|{r*]8zSuń!$AE*dAFUem/Ѷvo[ܻ?wopr5ۛ[Gn 4c;c;c;c-A z ;c;c;#:$ KM7VKEuXMJJ}5܆[!PJ9 `\i\1M.Q ~ _E_,oVxܔ<˸*ߗc[kl :42zZrrƳuz\(dƪ+d$-)L&(Ąȩh%Q=ɑk+?T`,mvЎT\FRw jA;q5Ըݢ# ʘA;@ҠԸwPjA; .!X?qԸwPjA;q (^;;Eq|NfW??')|(:O(LN.ى,))i,ppd6Al5%sA-h& ǛJ.&LTK1mF7-*@ IWLYJuZ"⢃ '0Jd vgDž_>$H!OHhzF-^-4{*r[`c^Mza}}(-W_bpU1,LX;r!["O&תr&\c %uM87:][1 N3tjEDUkZ*@ rPj*5^'ZkBA 5bWD0a9]s-pZ[)ՐH)2Muk05ބ yQ6z@pRQw_NX$_=ɓĔ>yrk˂h!nkۡ?r/>h$H{%ףb%CVM)W umD HrYpԫagt5HtU1#0`k\Tpl$ωbxܗP<*5}R:̍ʰ3dBE$e}Ά!88ԊTd;Y7q6]r@fl淬QT9 %lt>yꪒݷcjĥD Lq % _5 _8 K98D& C1[LvWbjuZtI#O3WONuιj謄HI|F'm[{R^NAJ 4}tp]Ǭ^}mlYt^gzqT WżyE ?7Y~Ag p`Z8t Z FSHt+)RUN~e_mPlk|n;?70H:;ƊUW(T<7)v`u$'I4fG?cw:iȶ:[Gg-xz2O{`}HۆlB&-*`$tVީ%bƒLLEMCH2qE)hI0~-)ʡfP/sUl\rbm`HQbwbJQEJsъK!(YngRA)ud{H/qƩ38?=?/9CW 6)yYH:f--2rT jg nXFTF%UoN%Q`QZ*l ]bɶ  B9]/q6 K9KX/h9%e!ˤZQtX @@IIyQ| ӭ ]Po7xjlsU0KXI It ,/T & dj EcLOaI]c| >0*$EJɠYG a8.k>U7B*jkAR@&5}8IpMܠe\'$oq* %Ej{!e"yj֦E“QDK6΍pQE{<^vJJ#^#1|5*{CZbzJ/M <:Hqv@ӇuW jǓˊ&6`Rv&a{ u}XG\1`X9- r1jW4ǶGJ{D-`5ӱVǤ{1rw}wBv|wPm[vD^YmlAVN&M~R:8#<1hIC)[5'ndX*bK㐪 'V$hXRoͪ(1 YTT߭ = f4Ԛ.eMmy=9:s׷ZJn|| Bqk!ٴ -έn?\1vf%kmYE1ꛁ`w33X`gf_fA_MG=[MQX%S61 bYlEvUu`˜BeStuyʦraK ۯg Jw\'RDAzIY 1Xy :0DeHAE:ݚnQ)PhSn|L. P̅U¡H)sٖy`ԻPO'E;Я1E#HyՔfˊ_/w=^ujI-[`sJgiٵ4kJ K"^JAr嚇=" /w8u  'm[ZIW( #ڄ`45bCqYvV,i^Vk̠BSf FAHNJ:Ar\bDv]h9hY!(-yiN_Hzh_yeUØf9po19lBR<;p_B|btX̨H#D?0L#<{"X q6-%LtYEoT*{I2rHhl< OQ]?*Wό/.k3L)Fb41DSv5IxOON)8 %}Ҧ֤+:#V-C6 m"j+mn7H߂Ȗ6@p3Z{1;jk뤷́6e!-pM(2NJW0=Rz˲ {DVϙg({ 2K) J<xt,џYa<-?5~t8>]%]pF RWC El*.3K^%1qC.@22jbҨ8nt}}[:TRE"ó1_CsnX;pei*xV\<]h{7+_EMTNEl럖3%7jb/LUJY$`%#Y ݹry ϗϟNoʯW?NN'INy>`:a3'o9)x_M|?3:g|ns=/u7?hަ*x~arOvثeKҳPëG/QnvE216?4[.l4ǿHzgso5,oeӰl5oߓ.|:lDngsKӖe5/jmE-Ksv|nnX_bki׵b\"&ޒ]%͛[.㼺Y 귬6; Ͼ^!)1n RsJѝ4#*%eFpшъz֭(`_NK[U-lt;3&a\ EMO1邋Ȳw*yb%3k, Ml |VuKskc+Lr:v8'+C'g'?SvNnȍS0[)PJC桒DThJW+e g,'cl3t-cy7hi?eo3%FrM418f3hcy8pSN@gTG^l¸]Js>`O3%z9 KF;UBV ,]-qBZWl[! iֽ 9[ݣfg;~]vuu)EZkX;59jw?٩N%n}ZquXNabKV\s&w5 LJ >Hrˊ6AQB "qԹvӮ#5{q-5rn3D2tƔ אȑ M6VR7Ed(oY[2HԵJBk 96BcY(5N<9@ Ш6[ n[y\wOYEˎv6-aIڜfC; 4IslA,D؞fX)eeq藦)YR"Ab JhEb&.JfBP*(#Q&S = 8O_::$L(SJX!ɋUT.eȜ`5fc.e\i驅i8o)OWVJ2酣XH:;ɣ4(Т"lG\aL?3VnCocw*51v#)ma{ {>qW]|;ϴc}W7gZ!IHHl+5$20H$g $vh+ eVl;AsєJ )=s@H-UT9 ݖ"dXdoMd!#:8Yv\dՙCe,8Ri/T˪ck<$bFctYs6q > :RtyĂ]YҜAD-N1x Y" 6$ g˧<$$$Zc+/ךвFfAMgxSRKI{dM09zx>ݳkRwy]e)q%7Tg͖&**? 
H+qFed-b8:tR1ǃdp=hJG*2*s3pAГ DYH^DNd;)pKt@mdl؞^6q[+ږX*_E3^y8Xo]|4[*7M,-s.M,낿VԌ)SP9eY$Ú+k/yp"$@Fa1\ %'F uHWQg혱YZV2Y&mdWL̮vkԶ=j1 3cIJʄ,8 MK;pZ$I`$x"k"NJH2ݘ#Y&%0AHE_8sH$:iZٌQ̄+Dl?vDĦ YkkƦ"Y%O$<`e6DF W:DJo)d'mVF2, g|`RIq Ʉ@@ Ns ‡#ZZ#g3">T{ٲm|qɮ[Eb֏a2/B4%/)ˍ"2g"dMfޠg-CQH>p+xw슇2ѼP}Q^#m.ȸG?>STh:^^x&Xf ]b_Ot‹/+ʐ/J^Őg8^=`# iBm2Ǒ 8jqd^G=JR%I.jeS Vnsst_ /EQC}r5$ұ$z;pLk_ɲ2RРs~Z|UVb6G@rG ..|O7?8XXi2ހ~Dv,z9 n\M.; ΧKQ5ŗ0@}o_-lLq2L ʃpLhz0;_rG3?L>d 2cdqe@^+]F0fȥ% 14p!c6oE[lK^ekpJOm!ć51åMĬe!҂*8 >ˬ.cHtV16${yijkV<]5ԣwk0wj[w<;ƒ&e1qs0B<wLV鮻c #æ^23 $X煫-UYjKjyڒJ-p.Sϙ\gq<*:\*pup%V\`& \j:\*UWG W]8V7Awӆډ:>;Y~g/NTLJ[_ޜ bh9iΑstiR7gRÊVy1e5fnӵo6m|ŋq|I p\hbLi]9wu'0:quBh PrFp90NC.e И~k!LSNd *_iL<<|!ʔ iǼQ@f` \q0zCUBcGo@V9>-*\jv pup%K.y@p%f  \jm2D%G +#,s0pEZ& [J赫c+-h:-B<ck,;pET1•(4pE+v8U!P 7 (z:BBsW'ȇi \wUwPwWS_2ajߊ`ۑl nGz&j;*u\| =\:`  WJT=\!\ clA \UUVU҈4\u,ӿܜ2RKR%[<,r^L[Cw6pާѼTIPaiK^ͧQϵ\E8#[<< D4;d΃y56jɃUIT,VVK1D"ÐDz(-@|N9YŽ{˽e+6 |ΪʺR~̮3A"gY愥s8 km+G 'X , U"%%`$[% pP<#s!\Uq<sUUxJ`GsrO>]e#/tV%_~S]rL복 YJx~NO3ވ*:K6(:Es_t2;ow<%akL Ի u57^|p_fU5wHT>ObOodAH "`mҊ D!AFlL [;7:NLoaay7=|+7h߸ ySOWz't[o+[-frkjҗ0o!}Ou_ хezUYU?=WG &>ǿ9D/WսTyh^pp-nYկ J &t*]в ZN'YA+e:h7[|]/9bM|)YW[,\vA*[r6f1KOR'üNSR8S$_i6(%,)/|KC()R,ŀer 5gw~Y/?mphjS]Z=^ôVL d6dzk ݹG7f_g/{Y-]/W\>y6Bnm6IʼJ1-Uj-?jѵG2tܺݢy8dz޴^Uotf!ewmx|xwKGk-χd-ztxלiwt{|QE}kjt{LdCyy:S;0oVG8/7m$RRjj}eח5anI}ѷ޷_aKi.C܆b[eZm2;wyHϗ 0L|eN]6&Й,30w'M3 p $DF쩸? ,SMPq5mb+!Df!ZI%rYBf$TS:;gOFd ic.tF9MK0Y$|$1wo&Ξ}Cꮧ߾WwӫK!>v7>6Bwa|]4\Yωl!B?0ۂNǠ]L@0'R"h&/uT.6ui!e*`m #tAMm MF,2OS @)TDh٭WǕy{lk`ۘbA%7YT>ծaG3} Liƍ\|q*E-.oH,Y>?^tqc}-Ľ <==q9b4mC8}m{KA^|a Kr|y@˘fDtk-#f-!7:j'kHki0fÒRYAEzU*i%m,fc;I= ,BI:0K1;Ήe.rQ!}Lˍ<]1H}h*2^ӃTO|fl#z0ցq@G/gUYVgf:_W)ݐ1{<ﬓk=Z2nI9pc8~oލX̥2)!>b[ɓM42x5+"5]%X2͵oe-3Q׏X.FOŗ2s濰uM*o v{?dӫ_O_K#<6o~_FmC7j-XhvPeۺ  X0W:f]igFc`Ny;DbGD48c(mc\1TF< @&5j[I# )JkQC2CؤDQ>G8)LNVSo|xf^q{}}yv1.;?煠J{Ȇl>7)t7q8e0 &ꎂ Ĕ<Y< œ8 J2 Rj+%` YF?N%S.:f~C ACS(fɨ,2fi& :k 昄gZ%XVM`3qXSBx^!繛5jcX.lJnωFݥ8G0 :UUOV\5?wr)΁:;SrvR!T6ԧos;vPƕo|kasju9 Q2ZBHƇCB䨋GadhS&K̂$)r'h| -+jR@<1B|Ѻ`,>bPJDSsȚ9NL݈[DپX UЫ_ĵ׾Co nJ |-CvwwٯwTśIh i̬FVY] ,R0MZ Tl9OSR&nXf(T.%Eɗ޹XY3qԳ^~?!5oԺI[&*_~]Gb:uQPמJ6&5X4Fe#f}6q*N/Z;KbhCiRR0f+ `V3棁' .V QH(W)lf6 v Ձ$f,AJ&BoWP9z500f@IE69]+qT r:¦1sS,!0%I>fM($>ԑFeXט] 6k3?V&tB]\XoKE >&W< % n#K^yUɁl"ڝ tZRWf_iбSұPLFDQi@$Ev0^$3CQ"$$;!4\Uw}loڪ-ΠB@B3J\6x6NK*?DUJ!Qw(?/(E_oU Nk;u2u'v1C WщtMCO!*hzfہ1t dHv6+->x6CX}eo4ԙ$6bGʪP{ u=Xg<9`P%Y%irqa$M.DYASVpƽ5w&1r^-~[0Ĭ׻;O-mP盩P/g$uwnăeKn 3FwI kqO*&76?oLaXG)eVI 9xm^#HVT^=zV` ݱЯlFO5bKYW;ϱC!q ncN)aO,IoTǍ)+qS)tX.ʭ$we)9qNAfx 0ThtW\ Z%ZN`i:Y1<ż^uF)p>wAA/KasŲ + v*pJ-]TdIgJ$gDiV2^9l_J_è gܝ ޿AG׿I]MC^=|Ѕt}%/ccb21ʲU0ct׎9iwc#X^茋wCx+8W*ͨY5D_IpgA/WJu~$.l.F F;O9ĪJjv#H1#bց|< & ^NzNa4"~W_xڳO`gܰGl]\jX&|fq\_qDZClKW<ܓ.`2==oìM B7lxC5*ʠL65"Hj51 QO7h9ܰκ. 
ƀv?`}rKo8O]>jXY0=zД5N)J4YpGi~ཕYE9E[h!/cGo 6b`$jxzOH\dFSɀ+DXcyp@v@50rIWvcnM7v܍W}m ;U{ G 3d|0OD 2ᢅkz6 CV%btj&2,9͚hmr9EңRnt4]GI+2D@B"ra*CK嘢KK4鱍t4^S.S#*3w򤳓4J#Щ0Ǒhc Jf:;")[Ӊ#)j7: t~Χy8C)i}~l1ӇGb=:Uìv۬@$)C1xMK) "H$>]8\@bD@"v*9'6IKA>YeRz8N[nVQȨjP4%33xCmMPcp ώLb:S#u;Hgeؚ8tn.5ݩXC?>Us6Qz4a`|Uu ,lgw%P]/ -#Vy IfỤ-& Ix0͸^kbBˆښ8 5LQ@K={Ȧ0|ז |0XX}7gWY6V5GUSoT _ߗpVQI\@O5FeKdzN:)㈣dRkpa*2*s:3 '@ <.Ep:露T FَqVbm Mki@f!>Yo}p`voqҚ8G.v[ӎ]񐷌;N4?mI^'[-J4'd(`[яOhffgcl: C 5Gŋ0F|ڊ3.xQ}N^E^-~t_w9!0V~3{-F,~%-#a}3WQsˈ4$Sdxj?"%\gꢊ{1 _4ApHҐOD8w6En.[wpF^5r/:8b?U V5(ԇ_U$+4gV@E-5hVnP~S9ע^]d~l-,@OrpyZ4T?^|+TxC=ZO?Bv!ʔ"9^z\(a%+$l:0{-N׻^~Ysio"x\ ^+%eS&Alڝ#~InarQ .32,C8|*me.[#txI@/eZ+N%8o+8]]{zil֋<^6g Gaeհ y+rc@` ǠBp JXׇcPJ-of8ճD ~N[ԣwWIݤ\\;jWOVV\-Bq%Bi:\]cW=\= \1-;^`gTGшFjTbt2(^9[?!8 cD$[%xA1Z\#KJKҁJeջ?|aRbFя?;ƣ).F%(H=PQGiS~prGI`FWO %ní.RN(<հ F\2~5ӋKj(Fd׼W/|pZ$5N&_z}i0|/G核zo֨r4*k7p/p ºW(%ݜWB c @`J+;ϮPJ{zp%Җ\fQ\iPڇ+#\׆"|Z%Ѭ%-&n:C/p9ÇW6Lqvie,]u~2XNR2JS( Ըשb~0MKz=*6| 6to坧(0=6?Cl֚I#@`%iC+rm𣇫WHi@`]boJy s+Y{W(qvv?jԤWvpe͓՝&z⨍Ik՝4c p{S_\ҽ+}+VJIW7zmKkiLUAT/QƵ4wxnwmm$IW>vIybf4{_zyH5IvYEbT6D[Uɬ'ODސy-rtUG/E80zmߩ 3R j6=eYu@Z-R9A9yJra* U\${P-Z$jyGYլ fVI)}u.fr R+qkv*Q֢EYg<ng\zWON [͖_'iuo>|OW)j?%qeh;yu{[鯫dP//.rrēXaΠ9&e4(AQt`ֹ!ҞNR/9ܚyx*|]}strﰥ aCގF$/,_-vjj;6}7^)^rg7ZSf2[ua+J־6 =:ήi}4R-Ӌ:j a7ԝTPeph]%tpwv`YւcHDKN-1l`Qz%0X.!K(-<$ɪ쭧?!>wI Up(' 1BFs <߂YoOY>m:۾9 ?JQsmϹ眯5O7[抬^MW8>:Ѿϭmb'ۇV} >+qg?fTn8wƁcn8,Ogu!ϗZ W瑟:zDSwXJ.,.~f)#_~iɈ:d]}Q //QuKT5HR 뒎fgTt|ez?3f+rs)%,Qyls'.;'yORF_e:+\uD ؽ2[σO7G>n: i{Aɭ{yjQ? *`?RҺT˔/A_\ZߧHWŸGy:jlŽ#Z%hrȧJL@D'-Da `Q[Gbv4tܜ[nAwvpifM.i\)]??g ,A*+5H^9)b%C,E#e9^k˴b+%&bСfeR2;ř"pL9>6:7R* ˬS8! {bDCr$JB7:o ΎNǢc~_xIѠ򃑛񩛯۝j7f<]ӹ>Ci6O7:몀yafz fVIom+]Vhqf]uڞu h'/*J]Q%u5O={z3lAmRʊRV k=m?b9nejs~˻ǥ˼͏Gy<?re5}@ƣR???kΖTӼfefk ?[7 ;>~4{Uue)^A+eۗ_1Q['‹/B(N꡷K| :!U)N$(5JL%XoSfT\ )2z7 f_PA=Gb~цdz5czEZjJyetI3Ӎ^Ϟ{OD.>5)}LguWk hyУrew;F^i8[>j[7-NV =w!Υb.CqHh + ѹ,aXQ) TUz\ &ԙ{Rѡ@E%q`U憥`cpv66)@|],ZyS>˨d>^i8e) _S P $uU׫ciȅ2\sULHYhcH1kl8ֺ;/w,o֙zMmp]^(I- LLE/E<B6&YYdI~)踢LoveMR&zd 0"rNC.y^Qmd-)FdBaI:)Yyj6_>[&[gaUOgT:UFJa,rMatfAɴ"J0T"d;錑^Y-[=^fZp.xhlԌP2% 1GdhsR)f;4B ]>>%n7Ld(l2!,0mJjde18;]٢fo/^h8ft5.MFj & YBI/mL 1k\D"3Zo}lwoDOqR *a٣g2J|\dP.GͨUR1fOU=~4~0G2dI]rtr{.$5S[MRp{h8iv{s mlܙb&׃[{=H_aƴ8NᬵXwh\ Ӣ$?ʁp )"Җ)fIj4"8dco䙹2P}ܷ""v蟻hpږטrCy %ȃdBk :FIJe:E!5ͣcmhYK[mu5蔤V'ҍsOj!Qf,'$MN3rT_BPJ+Ʃ+tV@zU^LDL1K+.#a^@d rC9Re: |PV= RC8휴G g!f63H$wK'jN&&Ж]-C^yQZ5m߁3P^,~'Z/lث#AAMTE."$u|;m)b%[7[7P.y45Im <= qi Q?_cM>8=U(]?T~^rsMU?}?6D oOGv>YaAZFȩ]i/N$@ m&^rj{M64FꮺjH]Y.i0 m R2M !OcMd- ZIDkd:Pgm76G6Is}G/?~~[⵭z^H)l[ZVo[Pro-%=87+UT>NfsB7O2URlл[[-=q+HeE,*Jr&z_/+,%G֘,P 0"q˚ ~w &0[8w:2c4iUמgCv2M#>Gl 0ݸߍ֎_VWY6 r$u9"^WSTzc/6߫4mne) '0M; 8[J$E$S9ȝ7 @xV[2@Б^@! ma@eHYrcrxμlT@$g% t`ֹ̰aqx׺Szz>Etgn> .H\,AoJ*=~ݽ?+gtGSj&+Kn0Ȁet@W^0J$b(Pl}~2V("kmH_e68nwج`TwW[L(RKRgäHb,:4ǯ<*v5"v^Eq]A&{z ܪ,MLIY,$ʑ8Z)41F (E!m8ؒR& H=?Oz:#$oit&jqs#*5\R53>q:/gȂ Y$rRF HT ~d:?L;ѳV$Ϸ&yr "!1,g(Yj aIqmLx sDN"y \8*tJJp?s=d3K$4L[$?1)Z޸ܱc5zZ>CC7 Nonn 5,A⅁FsJYn6 (o*n`=u\3N{&(-Iy\!HW&9},ȰHC<˲6JF/Pˑ'es4[hv p2yބx<7س  #&Lk~SPlMPRвק>rs|1(Hڗ{oezɟsP"TRy_Chыt]-q#apoJlX+ddB-f4 %)*řX7bzŀѿ׋q8ǸR`quQGTWյpN GDѷxdbQp{__>u7JnB[߾_{>I!ERqa],BڑLK:TSd̖ȎdWzr͗7|6;W0~jr⽛+hOjqM~㎓}T%l0#;(O*^.F9d~菷-VAwSbEdWQy}$\H`41QPhP}2m^OJ욫CΚKو/d]~uCG0%[x/1\ȣZ?F#Iw-mì;p =WSwwOtoe#ʉō@`+-'$&>d8.or_]m j{2.`XA"Iyq2+|p!T3WQ`|ØtRMWExn{'c4ȪHa_,@>ͳ'3BM- "FjrӼ^7ݩ6yćaB}M׫-.3Q~նOf;naOh5qg%xO㈌;Qf1$˼BINH[ャζ+PTo/Ph"¨ě$R,Yz{! 0 >#7&PYp杗,wB1@joa:snŸb( _'L%xc%載X[E2b}xf8cIE}xIvI3&i۝4CmKь=P ̕[2K(%0Ji%g3dR{Nkӯ2m&oﶫCod?N93ZW1K/5@ RؒX1xB! 
x2g }zhQWߡoýW}NnٮOdrv]p`V/Yk&S(NK&jЬN"۩iű-O}ȶ/ڢ<۪'+o75 LR($Ȇ= :'r JjUDReI~f-Dj,Iei G \4 ^<'ʪgLpniX.AQf_F2$y Х暀E̫Q"7vX2^Y.'t]dJgL` td\RZS)E.1)i!T1=ӉsEm/B$#VZ樬W:zg9h.`*܊ݕ-FSv`m=5ކ5`ώ wfuʡzWG* gx#'!iaG5݆%OZc2KuHj.Fcq!$óeVd1#=:#Xf&M,iDdD 1,P \̹*z2 0K͍;fzlfg[j̴^k2 DA IY<2D5a3w?VERFFR2o$%3)~ͧ<]ekn}wp=ι[c,|O+>FKO&k}$eKFVvUSg͜Ʉs$c$wB$0%2nq2^ "#YdVZL X/sY)AR{ⓐ<)v;ѯ!ȎSSH4&0)%e iq,cϛ,E/`RP+&;e#RvGy.1 f\}%!-ڤ>Mfi +@sGBrydx,z #Țٙu6ZwjKjq鷽q6}ۙ6`5&_CWsjWTfI0Y|YUIn {Uy7m<8^Yƾ*edwKa8,׽.W =Y%}]ow[V(jW;DHK+iuIߗ|PN4/MNڴ)W/UՋR0$T$?d>JC ooHNxG7?|*]R_pN|ь0I#+OiR/>ktĥ,q竳p^NDKDER~YtmvM/cm[B-8];׾-ٰȽ|AS{UتAwMSbŇZ\ڒtON陸SӪS=>=axI<[NW6Ĭ$Y;f\dp&,׻U /&lвɃ#r7?I1[=̞9 !,ڤ`--B䒍ZrRvK~vzCwzWI;<1獏;L~Ĕ#kLP5 ^2d5C)Hy#.혷m]RX6ԚTL<^"g- n\B1+D+?Ɉ.﷿jH-QS榵f񯧿z0l֨u$R ZQ)iΰyRn]b-J$V$-[޼7d BJE\:0.I&'OFFg {!E«+\Kf.sx 4Jc0=ކs(kE6ؒcO?=1dIhF'U۬AE$(-m$"Aee~:b֓e7GI:ּ%yDI&e.̨ fFe,iyM+ɌM5mʋ*Df+t`6=5Unf ^c~}W| ϗtZ*JLLg}(eÿE.m}B۫d4ri %R\" , `r9+HVIH-{-6Uel* }ϲdW^)d^l܎7>|4[?mFp6%vZ%d6[27 Ps^#:/V {J=Bt2+z`1 &n$QĦ$8dT0*Yd1P79b&zؽ f&tul;COIT+f2AD30s)W 8ӅLbVXiBsEVthI$d.$KB-2,{1 E0nD>6]a$⦅wh]D֨gcjxDTH93Q+dΎh JD8c*Q+}6d$gB )4hkm(wa hM honnv{ZYYR$ٻN3#il1)[V(C~$xyH:4a#>$,/NZi3ˋ83//xq="*]yJJQUD\ G5 H<ӭ&!̇{.4^V5U9F"6QQQ-Nя(8fCQf3Q\ޙͨ ǾUpi3K܌*)rw>~6tjO7 @aȝ&g2R ke%~9U}@HfùuF2pv eRlaJ3zYz\7蛲K)"e.}S1;(zB4͑|7ܓz`T%oz*7^ 1Uh~CpT eP?RJJi$*CڼZ[Wr( ̒'*4Ʃzw߄8DP$%ƽ|`&=,_hgo^jh]ekTִ6׳Vj>|'?A(e`:c%ZwVcJ*N Ք3tp]Vc+@ɵ|t#Ct+Jpugf"*DW_ ]]/ΐ$g禫bvCKՎ(qѕ؁ĉzvXv\Jh5?v`7Ne^Έ^jJKBŠAA!+ڶynrfvYm1Ta /L(& %/4ӨXIe_vtlhbXk^#*LR~wNQx1RW(i׳}ZkW|M/VZ%N+}/Z+L-߼:1'`6Vk#A(6̫L!7)с(j6weOGPUGS1tRB0sH[]DS02b[I,VS2"Q|hpZ+.mTE/Q*;bǛZu^o-U[.^#.6ye4ǒUCÔkbHh[Dtdh{!ml> )}MӉ}f%u7b'R.j(ڇ.zoehri-W PR^&Yԃ?~JA! B-޿\(TktzS3vyt(Ts9^ ŧ 9v +.\r4TJPb0TԿ^29uo?_)sg!]/ۧ;ߴ_ %PCY Jp|UJJNY/p>R\Om.YUk/9vւH(pJÃ&bMC[;9Ltr1޵)s{>5ﯶ!]ueksr}wEyscz4֋3O|.O:#\ΕtW+ q.IդmƽRUd4Μs%yf(mlTz^Cފ/oZQZPrUkk<+r< c3IgNI焳r]8Ëޯtuv{.`R埐a+-"dc*25CB>hD6›g s4eci/UE8Xy)HjWJlSZnYݷ~=ʵx&hzQ& Se?5}/wDFM9|88P,FPFSYFMh33"YJ'QC+Lcޙ[&0%7x$ Yab*L:DW P;<ҝ)5> +$cCtd]%OpJ&Jhmk?Jy^"] uUDW RDJLOtJtk 3:CW uh9NW %=9/N{;DWXtLp ]%譫EҕZr!?u%wyep'硫Po_]zy+.:BW^ Bylt%w+y}>KX*ݡ++ ]% ]Db=N1L@=hz(}mNީbX'~w%2-d-7-iN9q#kqS]deO[2͢g[mxL vڭNnz"%E<90'L<%\'P[ɓ{Oa{t *᪮UBбUBɉ^"]q-@߿Fq+(ֆ_Wuq ܮ=[_|݃" 0f=;@\ڴ߼2~wͯ{"~7fUFlN).4W%϶5v & 7?~blfr^̻W{ThfQEuߗ9B!Ni6BUc4/:}V9/s  @8>_I2UE?|{?&x3WiU~Bf#uI =\1z[GU++<8~WC[Pϓ&RU-h`h6-rr~18?EIϨBWc#eq2@HbƒĎ_.ܦ N{4>ܜܸagI?..d =!"}a*-{)inbi1g;Ja8rL '}ꗪ}4>ݜX,^b"V4F Kd.K+_ZPlUV)_iVyeOfv< oSGxP:Nj"39磛a9,ȹV0UR;;o(O?1aӇ?]O?c$YޤF"QMԒzP9"V-f՘̪2yi&gX3LƋTީSQ N"Bt2 r DJ],'g#GmX4#eUD h@.(|8}]`DʩR ˽)YQٮCZ+&`IaC3IY.RY:OwhNN$>4GwaTBj<uM^u_SOx_G%Zuz0;\wfY?ےSrGӹhqZ>(+S#<kKdLT\ 2ة^ Ri>Ȭ(GkQ JH1 N9%bdF5sf"Tnfg;3Uqa6ؗ uf.T'.<(.ͮefڳzXN;?ٜvfƯf0}/#K`Z 5y*8lN>:%4 tXb54]hF&gxD!`^e) Ӂi]`f̌ vƶo٤XX[Xdn8S{"T3$"pr&Lyo 5Nk@47I&!̇{.4V5U9Fn 2f*f%P=ke]K$jXYQQ-Nя~~5AP(*P.f}A](xI 0ǗX9b s K0R"L=W9U1^ Hd)1{19ݝ5]yx{-rn,hC~zT} orG=̷dM w;tٽZ'bhΫ?O9fgw<[=ӏDQ.{鹝|g͡f͍Ll49&\fid4*) q,bGF $JC?AwWG fŴtDAtˑ####$ƃNj$Y}wˆ^FcD2Y+냉ԧ+K8FQ4`pHgwXdK|< VlQGoofW]3\.4 ;)-[i?n;yЂZCE@1>4E)cAZ #yV vh?h 62( R*:;18q|Y:y<>EJ+NsyLܡӟĤ3?'3?' 
e/gNlU Prf[LpDdH@` mR@AX/O_zI8>ùjX:}.{ÒjuZzqCkߵb^<Η_O& `cm H?_MUxtu6gLe5w2:/Fg={mQLUgH[UСRMP[YS#I;"OvX6- d }U4#:N&$S,ddRF5w'_$1tib3>C(Nߺ*>5gF- fqjRkH&N<*PoaZŻfNR֘"3y$T5jGPQ:]_VPk2UN4IʻZl)cFR{$R-Hv@ςΫ΂AV:B<I JN> =d2qGz.2vUDD d %-5ZA*F3Օ̤2FEV$64۬mEDtleM]EL/D7q_$sXb @L)C8rB0[(;bpP;35y^41z6U;ڑ UXK^l|CsgNn,AD/@9+HM$XpbrquÀ 'm ވQ@9: $n.fҌ Ew*ݫw6=9B ],BOqu44xD,Q;u(3s)Y[ pMdho%OmaL#VqlN5cןЪj @uFcuno3͵跒7<uXC x#J.VngtD(Q=K}*I~X&][(0-IT#9qxSCI)|"gtj!7%;%Y֑vuEc!,EqsX`hl.x>^:/6y'{_֥Agq rDj.yszƦWn|<}!u/ٷ/-9S ԅ鋺K]F/R=RFNG']VC^sKTSBap}dgvi?bhPPLLL$eQ؂Kxsn1(fHkf sqH5ܽ[j`Cge˖RJ9_N/޿aTù=t t+̣-xo[y{v _(t$8C>Ѥ(?Xz|7%C)y !U[bJX}^-zҌwv=JvڡlYm.5ZVbd * @-UN>2S~7n8I1פφA50[׀[B s߃ "{㟖W&ky.?bI>ߤĞ J1ફٻJL -ȡ)o0j3DYE;TE.`VbFRw- v%>G]Z];.`Jԯ7n-?mhϽVճV:o]Yƈv~j{PvmUS;Jy .*+ml|=ʞ~=&m :šs-X`K 10kͥ'V$౳2-9H1)؋6U3XxNTsʉ}/D␢Nx,V* S gKܚ]n_ox-?M|U@X}A”Jq)}S'L$Tv?J-:S~Y ΕiK 1}+d!CKX(EkjdU D{;1oQzRYr8R'FQB-n"$њHNVМ*WgM?\yjziEQE2^Q !:1@ɫAjJHN2`[45ܤHH ~V&TTŨ({E,Ws^ "ܪ,ͣ t\^wseqjBTe>|%b^'{;lybXfh'BS}Լr=uq2>ql>GФ &x0M03s T"'پ$L Q`M1NyBGb >Q7KR2h>NVN &̽::\?w4󠀪EHm)BV D5ܢϪ'\I.'L4]vk2khƚCՒ/,9motͪA;lTOI*Hlt]PPdMMg<'{ijɰK뒺V3_M<)^j{ױ, l".BÑyj좱ŚeTp4m,g!;ҋww]Uu>4(\ xUd$"53xyLaP }!Pn88Q~d Yϗ[~g[oy1_lgDH+LTaW 9CYcwM)uΐZa0$̶kC#:.8Pׂ4Vf!$WzLV Č&F4E1=O'-N6+t& P Ƀ ,l~`s=W}(Z$"؂XkN!k'9z)$@ UO3}Cg g8vOwsHYsqv.ĆI=brMl5`Z.A-"ӃY&mwb\kAP Fˢߠ̆gsx|Φg-K<䒝+v+lJ# %(Bd1ƨ:%$1-T}T]TW[gEd$Q5xuhzz_SϤGYݟ&R2d[/=fLwr (L:mP 'KS냾V彜tPdd [4'$i}qeoZyi堐4o7we2C\] Zn6s 0x΁/[sI96aݚWu!JNc K^.O^';y2 P2ԯfOQ9o9,/%0 `omV}A2oo G@ f Qn {f<7ֽ蟽8|Y,~ [?_9~sq0o˲$c]S8xJ!R|P#u-)Y~?ѻ|cuW_*2r/7_Pg\J6󫳾ʛ.>NX&<#c2;EI^ẘwRCV|?ooK>x}pUo3%Y؀ ?!/_'[W|%k.r?2Su.^ŋl^M(iWω%TboWo(w&zo{={rJ{KaxNɫ{.SRjwЈO GCg_R+~|&.E-{xo6HxB[8=ZٝoSǬ,pٱgG 8],`x|iXJo,QG'!ޗ3N,ם;XݞۇSp?W:_9gdlr`uTs<[B5 >|{|rœӏջízNiD v |j/#rH1{gpgͿ0KTo>3qqɌ+#G_vv|-]9oػ|u<3CB.K9޽q [Yd8,x32I-7e Q]W\h5;^?ŭ}g]1ǀ]G˭9hW'ǻ&?%d3JIl Ym9\ɋthیJ[=2ܮEnnzij;"6q%+Q %,3G2J?X8S8C^ѯdXq|ЯX(|WH="tT(!y ʘ턲RQh4,4/GI2|sR\n[R&5 U$( N%34JfԭR :OMiIm97|5ksIJ; f+ K[UbuftcSIx34fFZ+ \6YT -; \lQg-MáuHJhm5h6JÛTwL0&_;+:Qeu,o94whY= >8x!k.@#6`Hڛ]h O1KFK[v҃C'dH:JɄ_O |}6>Vysҋqk""( z[4H4@r-Yoyu90iAJNj#E8:BҏR|!-86;'XR!xQ+QЗILIk[]/KC xNYb9k2= %pU&'[l^c& $XjY)dSH! Qc(/٥$ِDȗ B iX E@ +٭(Pdx┠9kA9]Ye~V7 (QMBWK)q1ɠ-&OZf Kk#4KYـLk -Csp3TW˺R`T^!LV)O#)ٰ j@E ȔҐK D_ Z#@آy , R `'-t*x4`X4d 3* 2ʄW&9‘F9GDI꫐Q64^] Ʋd2 Vj$=5(!ȮD@$AnTzCE5a ȸBLAAX&%P C9'X !@YP6yihFf3f%LD5%Ax`7kLABC\ɾL6'8TRp 3k#D1 S r ֐thA@gJ-)Qœ E@#)#MEUWfVA^ 2!}T$^iWW.!{/hj{VQA}YsqƵ yR U5I&:%R̓qPBi5'Y, Hv/UPTGwU+cAj\i!:7ȧ<~٣Ѯ#f$' cEeIJB 6}f ژ`ojG ^ oWZ4JF#-uy6X ·kT^ Pd$Lt9(jZ.\U>:!d4XywYN ʃZR I"9J=*#P>xm()S+3]kη*((^\ERbK*!x6B@FB|u)}LNAjy1k"!98$ˇD `"8Bmk ) 1E`JYM9Qsc[   e}IPS` 6 vVh#EV N+}KƢНIe#KrpqcOEQpYIbD#EF @YFu(?+,;U9cJoaL>1UGV/ݤnu w LUhda- w(֖mo!8`0! 
oC,xJRD )OSnfeG$_d$$wPU(V npF h=HI)'R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)2 g2v{%/hJ +R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)r|@ Q\dzQ9d@RhJ Ӷ'%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ RhJ a H $8a>J g.B] ( '%@bR@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)= ѻ'[lO /@cJ|~ד" 6+.|KlK9z/\Bpi¥/^ é5%Hm~m2YfFZI3,[²#ϴ[?}x?MˢU0wt;bSg̜0& vn7[M㋦۫aFQêq4d秛ƞ >qQO}c0b*4.U,fW.՚MoxrF&8D ~[2k>X9k+J) wN 7[P_C.(mr{Wfӳfo @1G(ZPϾn>FN<%$ %YVEtN<^>HUrM p x~@QZ )it0js+D+{OWRIҕVrˈ3n *wBF] ^洪3 DBW' QjGt5@J+!2+ld6tpȅS)'ψgu٤dtՠDW+:#2&ՎhesWRѻ.]NxUW^cUg:utfծt.Kt:'3+ v(BRBWV4jt%T hx͈{ĪHxVtk6FL/2-f8/=)>Eo: ovqﳋ{.cW*(eUXʾA6#o@GYݐ0AaȈe6BBW(I(JΈЕ2f\ ъ{WRjƲ ]!ܣ.wF+{DF] XV vy!\M rNWR ҕBJ]!`+k]!(Bv Uک  Uֲt ,ڈ_?]!JI+_]ѿt dt 8w +ށ8K;.ʈ,\ (]QZmHWByw%M'34;DcQfC5-9&]c[i39.<ݮoﵫt4GwtYZS)l&Gwtdzur.+u|2Ff`L6$}A)`$'}GWXlObjt<RdDWl z ]Z{&+Dɻ"]i/veCWW\ ZwB!ҕJ]`t6tpE6Ut(i zUN a"B jtiT+Cw({uZ'~t%vzq˜ nͱ\w`Zl= ]uC{]t%^s/}Ftm>tpɅ8jt%BgDWؚl CWV2wBCDWJ֛4zw V!GڭR=M(@nR^V._r?]!JEt5DZ2+u6tp-t($ ]{]`)U6tpلh 6(=j'lN GB7ZBWֺT7j] aLg  ]!c3(9mNtgG:lDvDhCWjǮW'h+jN;ޯc~ >RʾZOu+EtҮY+2+ΆU =]!J-HWU~5L3!]FXKzR#o3>;kҏ1fP8M/.K0?tz!~Ϡt ?Ӻ57_h_mc#q+n>-jjbV2|3R,T65UjNrkzbӳm#}]V|tw7NZT]VDٷ6.̴rMgUg5Xo eKwT],p؅biwg||?B.7g'b(Q!nB  s>zu%|/kִgm>Da6-]]49_iv6pl0izw/pt|Mp1{HQze]ԏQ_߯l Ǧ-xSMQ1\uSIU&[rZYɤ5)ڪҕB(6K MobT橨4^70(=Ϙ:d|TepRd$\/<+*uLGF|>X$l.n8[ b|%05W~p[vg/Y^sfzq}6K84HƱR?ߴΓX_.5:5T6U")Iq]V'MћCAՊ3Ƹ+(KݖAhA.\RE^;U5D>xx9\\Vét M;``qƉB8uLT J: UR3 Ш! O5 ћ4mghмѠYg6et z_=Uc?S.0abQySxP4UU{y+ùkA'o0^h``ŞIv<4Biv9u3ZiZk3`~i?tҊ..Ҫ%web:3؆[Z^VϏڦPA@)>z"g6&pLe1^f:۬ [iz!4Y'{ºye]0i|AqaOϼU7LMp6NJvm0sYtr}-[ݦ&\MK'Q+|( 5<*+sP?q{&쑓HC%Vt'v8@\7l ' +RTaR?ۋt Yff:vY=D1cR)Fab=J1CdMeU0iu] ?ɲ0˨ts#"*^o:CgSZ8J}JU8 ,=W UxKsil) Ύؒ{jZ6^MǓ#!d\U6!AMd5{Z^ߡZotY g#NxīWUOM}k;||?&.-:BOW~v}o(]@XX~zx3n<^M^˧fS!,dݫ}ޫy[WvUi{MOԷtc]l~:wˤl—_}v2~rqGo/_5|lLsu]HW ee@gi1?ySq=<2;qhV?/"]c;`ގ(6?Nl.S*6wpanfbwY+Y;k UPLA^O#j"(vQObw׋h5J:nI*|Yǿgkˋ>Yi ^_ָzXj&R),<&AGp\ץ+aOs}!I\:Q&^. WBG# :p|08Ϗ`c^?as-ES12|H~4vVڇt7n auQcG_8.$P)Te:Ng<~{oKEļFmKbKBt|$y&RlvMg2a, *4mP/-T;+v\\}(dsƯ!{-S=֢sƯrSCV6 \*rDrp*jRĂ0*aB UМ3xM>cVyÀZ!A[8-=l8&Cw)rPePAD:ʢ3 ҧXMKq֙\AMBE_ Gsh6b 32c.*Z\VarYEm'A J !2 !>gSvMFOlΖ|տvʹ{G7ޕR)+A{(K೑о(gl#6"RKL)dz 9T-Lք!T#UPڐDC8+j tF;酔d92) +g2:i >WR^`hF1Ta&v  x+H+rE.5VZ@ɔPh DVWE+u<"2[|Wo0/󅁙o}{3Pt6VxA2x @i5cR*tCp&=cs0v^dac0 /wā#~NгySk(Hor^<˯ HM)M֡kA ύ'>8.A[LXЈјY" (lRv0$5bP2 glZgDTJ6qtNŖ,cz†< 6cv(M nz{ts,ЏP|:o>Wn= wr3#\GCGg2BIv-ML[НOIm_'MV.h>r#HzHHx\-5$Ia$ /!&\t'm+tF}mv<-@OmqT8t0!Gweh%JSU&U{ᩜᩝ 9i{zrmC1`5T!UڞS@J媆: ڷ -˨c& &zv6۝◥+ݷn|n.yEs/IWM^ 3YjC^(?|yӿL?0DY OcѹÄ-ElWISդ18GƎ;^=\ ܡ؉SҔK:E5aZM ѰHTe7E bҬS.c֑WTUCQ2V[ g\%xZ{KI'ko-mCVEvRZçmCލo]`+ fīσWAtS _:T-5]5(.%bJȃٱp{\H;`v=ʬ ljfcSF$WK(E`h6U+ ֵ4ZM)[ AH5AāM3$l6TZP:jlYk\Rd:\D)Wp Bi%VLI9`w}#DƂLn9Qr5Z]͛'^V &gkMcpcFA8Iu];1cOP  mӓts!'h.5j*d r*/%|D1,Tcacэ,?2ˣjz*մ #1 #)+ġ >V1CsG jDZaJT0*42ғ3ҭq߮xnfrZ 4VB 9 8ݬrs1捇*…Z}#3=53͏˻Wav:'&!Q,Z..#+JXѵ-Bժ7g@',ߩR_+)u/KrUL;kg}[[vfV|lA{U,;tXbhи3Z &sթx żZ=zcʪi G'*`C@*ڀ PͥY3(B”FGPù̫Ad;-|Iߪmj˙"*-$| 2i])k(8Hí^ #h,K)mNLTlP9S~W6b1QYaRcƒg ˮϬȅU4L&hXJ,&bREÅ+:Pt/?S|=:;C b"9RHXVC%Qe'ajt@#p>'^7׽u4hPlg'ϧfO`Z: ߵ3J̬Zc(w-!_r \c\߭n&zߥU[W#z&գ!-aєW(8-*u%a_>P ёnwK.aW3)I2 u]%yJr9[ E$yU fZ OתT["Yl-#k]W6oHY']q{5LyVs0]wi[v[h=sFk~|Q諹-8NG wŃ l-8ΩVyqTk## Raay &bL5ՖWh֡o3,P Zjоʯ" ~mbްڼkƟVGXkZ]5]!.b>pxoY!_{`oGdAE ܅~zS5l~`s 8W5q=r_ Y@s:dkS@hfSKFR`CV&A> /ra\-ރLW1, Wm|V*\6-c!O^)|A,6SXy 77fM_~ ~s([Xjc|. 
M F]JٳŒ6idP;N%MQ$B%a RbLQ%/Xy޲+ U5!vRڵZ`jPTs)K (K8aNk##|tHlM%Д 6-vIr͛{-eq.H>߮ȩta5iqs77HvPgkY2'\9lTc6e*YjUm*JXA0}EF3J&yjJ2;2bG v䄧f(D4Ny~Rh\oYrx\**ʊAӶѲ% ^l2gBum8đL"!CC Nhh >@i/ !/]BNtEk7K%CbСdSdC>l!HyڐHbb>uu{O/wHw_ѻ2oQ(ɯ+Mˮ?k$/&¥c1'~}OGBxy̾=#8oʌNNchN~YD,Ǵpr.j+xosc۽ϓ>wHǿEPջ㽋]ϨnOF]~ϣqIȧ!oPvޯo'hybϋqͳ3O=ޯڿI;;;(~V22VAÐc"brefӽNN8:l>ES?&"w׿t&2{F=;x;Ź5R;SfyŠIOXK:i }p}DM~٥(Ճy;vʇڱCj<ѓ/;k''4+fLMWHx>K^[a_VgQ)'_o=nf5׵=5RH󦋂kWֲɟ>Y3p(I[:ŠzAעg]s&O6>l9C@xklΓ OL+ՓÝm5R .|-,!< BJ?tGNOwj/|ҳ?]ty5ݦ f~c eu$ES>]nx |/"m3#i7 {CX7hiwi66+8>9ˆ<$], an'oifΤMَy=6 0_I]43{J_nʬ_ްδ3eo1zqYJs }sY4nwG ٻ6$We 6X\m!U֚!QK߯zHʒ-Z5- 95]O?U: u)J ܉;(Xj!h1cĩWo/{hixFNtS0.'2S !l1E)ZgH'=Lzmzԍ`scvr N>{䅣Ǫ+ԶMOvM?Ѯi"Ջ]oead쯳U4(7ye\/d.OraU^E g}y^۟W~ZxN=ۍ./gdNH҇DKq.6V_|V+ rAAYh{$siҫ;w_^.O?őNOg:& ҝâtdb:gm޼ʵT4hK-"8Ӛ~3Ӷ<&W%omg꠷(dhQ+#GmS S] bm‑m6!2E To*sͺ6_J2?ࠪKۭuer=YFڝ\WޝGn-y|gK. ʐT'^]Ngs{t~3 ˿>+u.GrԮP-iD&4ht2Ax`rdqOl@ε]/M ?]c3y'>b)x\dP ˂*T+beCN)rjRAwƤviz%g=k0f?WXnɾy5{o~?W:lur՝ݿV6XlvKttV3zta<+. gGJم `bM?A*| ]F[29JqXW>zͥh5Ĝh#.3U 5Pc`"hq0qn*dɭ1iՉM><$w@a"e P`[ LAPS$)EN&ր@Ѱ9UbDh@TzQr &Tl%!["H|XQfE-G-ű;ZZNpڌcIxlzxlv^o֖9y'ίqnM}2j;w_?tAQ/VVLUVXYs\ N;DN)FZmshں;smzJI )J@El'Ak H s32U3 y`,>xQr gp{Onm _Jg xu#v녕MףK=Qtpܤ"\w&Ulyf@#xVn،Ma9d8SNy`WSBнffĎEdbcAmP{b_mBμj*W T %bsBb J4D.a7WtRXrcmŊNPH&iG(9bUP`܌Q D6` "6?DDP"H&D|66=\.5 XԿj[ds8"FZvTMEDZ)GVl4WzRp&昼-8MP|JbIKHdb#`" Qpq-E7&_`d_\㢞pqůTQ2oR ew=* JJfvʚi`Ňű`c_.Rɉ)kf|3ีǿ~kmzWMjkHſKvpXΎtzSn%5Yg[ѥd^ >d6^'UB>/43N1YPcB%M Ku ? _Qȋ,qUv\V 7Ԝt,"푦sS˩}n>7uMsS}n>7uD;خV xne9NN $&jDeTYnԇ[%߃ZqfD- P- 3oN`eXV!b+PBuFGf9L*B[]=ҡA58 s1ܷ\ 2ɂȲYe9疆*߮lռ^EeoFmU1MՎa\r+,)Ĕ"A#E&00ΜmfSY*tSDY0v>[pٚ(d Vj>%󦄘T.'EztEei2%S!%*](4LUk@l 5J2 LNjUsמ?ǨLc̆(& |@B5-E@ gB)<|-oiuگFcI٘=+[/W-9;%5'w1jńNηfJø!ˑ~>|S.Jy1>|ր<{,fgr][2ˊK>r6iy= g?_w zl~Y],/vc4} V~JnW3aѧr׳ 'W72V|~:p Bw1.ѷ[,=ڽNL뉘ʻcZH͕'M/'aV(b%!S^WNxIm{cUe'yZp/g7@vשڽ&=B[Ƚ>}vwWΏm^ejw~zODU? =oi߾ " l*j,BbIV&SЌmn=[Z㨢NM$:+jH8v@F.2D֤SN"m.o 0<0X8rbY~ 0[.8bd/qP1JilՄm`tjP`x=ER9 gv!mJ|zsu; ;䊯ͦGluDydёmSK+hClu}NuclgY섞 y511)VqGCT V X]^s5TX/ FM&ϲ& xqZ*aW)Qsp Ġ kFJß`Kpo>dHcŒF;;N}==ьGe7 Y&]ͮ*֏Gr$ݒ'd; h(դ~!NXo[dv_P]DQTٝR2Ad%{/wzwjhg:lLQTfz f MQX[C&]4W㇤n^\'{O I*kOfXo]9m.`Jk9D|y1huvLç4teY%zMK5إ<+xA7]@7_}Ar4e*V_xRJZY7}w\_{|тj%&Yv쳚.. +tEαD:rrCF˧-&.wZL9[u"r?oԍBJo#wD{D /I΍`Sy+w:];wobg/;Maoy@$d뷻}o8^ )O wĽLG&wP;Gncs']1U n}#DFȂmn^g1I o?bYfK6qn7Z;u\6@!.Ǹs.N?s 'w/FJgs':CVjOgϜsB%֤`-=Bd%-*eqWŸ#8 ,>V Z9_TmGם/>5/p;2K#1kLj4#4Lf!HMVzaL4]4yp雫6Uһm=H'睧^8:Nnpb-ƩVcK:-JLLg} LЛ1jEn&ibEkW㎡hm?jт3g'Jɤ XI& 3R {EH<5`U\BsE(:JkhD 7d8 R։Eh*jև_D8X?ԈՈIx]Dh3ID/BZ <1!Z!sv,%ҼVlJDX )DРVPV*ڕ5b5rf) C ^cYKՋ^^]X`̋\Դϝ"1pIz(rϸ)Q 8hiqǡPVև4AHLdѭ;w1ZQt^{8ޏsRս'ɶȬQ uG)ZmnG/&owzFW?/&7#Xۣl%g1k~:s\jMݎս_o&䢣0ItHu r{O,=_"E-> N$zs{m{rA<^MW*{taU:b\'ͳOY~ҷНnzQfy@Bߙ/=4_?/kXi2m{˞|76oȄ.1" UILdH0J?*/>Kʨw{ӍY|`ev*|eB]{Ψ;"2'I.ݳSzWflSfn{3u5V#oZw~+x8˷(S߱f㯺=M弮{1?)׿[i+޳V9dk][Wnjċ ]f%UV1E='I l!1caw7n4A͜%heI~<B4"sxV$J 0<\`P"d.*a .$,R y%D39|e^=4@l| ,PZ.w=lgm6C7ZX_,UekOAO`EqvZ ,2#ZlU%8zspVH zCvQBG#*GV;OB5rvKv' mNX+_#r 9ގ)Pr;GBh!uz7gqWKU$"X4S/AAߪ8)>WU~M aОŞn<4+/.Ьɻ2S!*yaLً@ e-`@,i&O,5gsF2hG ROA A*A"e}-[m#fcK78Kcc 1Ŗio#X g'tLMH,,1RkT[ς`q*1&(q(%. &At,,]FS`̄xb( `R812rJ8e"++ؙ41:3&dee9FΞr3Ny>.Fd LzI/]L }6Z RZI1KF)>zc?J'k!8-9oCAR{M*giC)wgVIca$UԊ'8d1&m#0t,%AҿAƬ}!QPV Gr6ц&~B]Pfc/\#ۂFrV=jm0J+o5c Cnt/{?YhcgtZc^Cdhs"eR{N;RDUwq y:Yi޵$B,򖡧/՗"y򐗝"-n%(iDzH|]W}.Y@VZ-CzLi<~uzCׄ)}Hԛ^Z{z,mVmgut u8AzꨞbZ !n_Ԗ -o! 
X4/ifIV%G|bC"9"~;%vQWgH6|iVhۛp9BY]zn ޴-_jb+g?6wců.@ ~A~bg핲^SߝzjmPRwɩYRHR!O8^݊,]/?DȖ'J*/sf:f_T&sTxYҳLg􂮗GB-to*w:($95|3޺o=46ֺTUfnr,09X9 ȿ={iopeð6 _2qWno;M,}9@'zfgE ק7<>yD>V]9.IeIFď7Γ88u*5hmJ8LAxSlhE~\S&Nߵd.ŬUEƾ ZJ\EEOV,xyJ ;"nϯ+|{__YUB]mI ×mgX>0wSx"f֫ѫN+aRZ0ӣ]- S/+(C a_@t%+(t%h:] hf:B +: CWQJnTě(] ͣIg:bQXct%&Ϣ+eNWҺzJQTsW]6H]톒@W~vj3] `rЕun>Ne3]!]>DǙ٬_| ޔ[Oxz zӛ'fih܉'.UۻK+z 3zi5W:z>V;C"i9Z0$-p0$:[|RPn?}ւ.hOA7\/\ӫ+_sܛwj/O\G ~:A&~ \;L?ed1~cFBn)d(t%hJPSGIWt%CWQW@g'3]!]yODWVpy6L%O#%rV8mzJtut=G=] 0ܕe5 ]-k3uf:B⠽iev㬬~+Ajr^YUxЇW*rߺ`}p ?IRW䉝4v0S^NJ+l0t%pɏBWPvCLWGHW8l- N=q`&n/١"kk\^\WeZ2L 9^;O5m2/~2gzu덢Kz4%2/q&29瘯Xmw{<+Ƅޘmeq#]H\͞w%%KN/bNC5g#.A~LygR@zLNznhcz&'(ϙfr6j#/I nfhS+ALWGHW-0]Q$F틮QЕ'(ic+v `0 ] \? ] NWLWGHW>F6#ѕ,adP2M|dLWGHW6#%8CW)'rj;hfzvh$u3.p6~tOWLWGHW&7]7+k(tMe箾J+ej7Ow<վCU %O,;UCδl+A/&CUB e3]#]96S҆?LX}b̉ RrnKd Gn:YҼdZoVf p-n~ ǨHy@t#ѕ=$nh:] J?#]9@tR+k(t%h73]!]yJ3 CWktut3+޴թӕu<>N(*+CWW3.hJPn$3]]Q֎pT .BW@Kj 3]}7tŏz~U|xu` vC{ڻU<gzkPtơ+(t%hC:] ʩE DW\0t%p 普'|‰Q&3o^K}x6zR5lV+ #Ь [h*(vޠtwzR;vI=5ƹ290aɁj;LNP;grGɑ%];e0t%p-M~\P1ҕe˳=ؘq \F+AKwpvr&]k7Z}Λ'/D'6UlǙTT (1G't?f5s>#qznrkoښ&kUl.uE'KQA|d3wo}nOo_]& YEmKɛtQuŞG *srBgr[m8fctXs`S&UsB*Ue2zʹRVM=SyO0v:nlE#V_짙ԭ/Rh]UUi 5+k%:dՂ6'Bh>цRkPUQskds- DM4rSωmb}oEҮ\.^@Rmޝ+5CX*P55@f()kRS3'aZj{-=zg0vc5CcvWRȈ- O9@Eq |WZa}[۶]qDg {5%Jr*|U7 Ţ˳եcM0mVW}Wul K*$#{u r>=3\"5˶ APu[*kѰ֞ m ,]ѻV(V*ROm`{A>v`]6jc4)*@F#  ҄WW}YbȠ(׈PߔN i]P^j+$_ȱl&!lmv6PB]l@+!6CA!dܠ ![㭋@$z`-e2] R}\18c&#o$VY߫;*r1PP KC&À:s%@ ` oL/K*ACM5씂D]㶂EVSמ%@ +Ex@i62RY&ʮ|F(AŭURE|)() En$˝{Fbf%w $$2%FheDt0Pl,%3l C$F=(a&}A֊X[ Iۅ ̠\ =u/fإeƬ#8Yh>(H;H c_指<غ3ƲmK\S*s*g{Y]`.%Z$ >:%AuP4 D VqmS0hM̀KpIѺOkؔ n 5AKFÉrB(x0,) n3EMU0WA.< mͦ6ՍW՝KC$`hˡY khp<K{P@MBo$ hw ΃BT:ԋκm,n=/W}lXi*+F .|X_pjx ٳh#X\jX}j*V%|޼yd=|$1z:[Ѽi`G &MkG,l@IgP!m7%sx|DJb|v&?3ίXPJb;jҽij@feuI%rKbKep(KT^ ttE :q~YІ0tJ#UQ~P`%ˡ+|)tEhe1]#]E'v"gKhϳN 8t%^S ?;;t#xX t#JEWRܟV2]}JKgG+YatE(rYZ}i2M@[-ׯrEz諭~vb0*eFŽ = {_=]͗[[sDcOu$ 䛶z9}s-Ҭw9U:>^S4*v_u4' S44MYQ{9Z- #Inu[5+hK7j~)D<{㓺I"l}LuU붮)wSO`圳 X@pC,nZ/B9%z>ݠ]FDW8b p/V 9t"[9Е!*. uIc+xs%G)+L)tEhQ#J똮HϭcAtE}9tEp*mmҕP]`g+t8v uE(e:B A~jtEM9QDh:]EHW1]AtEc9D1+VԼ2ǡ+yϪG;upZ!KW â+]IOz MU,ztE(d:B7Dڑc#֜jv/o/yɍluOusZ*ژ16V/ڟ݋elk7c$Ishss^ujjɕ3'S-.z^t, z4k]6tn^*ӈXնUh\jÃjHZ'lj.ֺL*:;umY/[wcl5hV[S5!ds#0De-ȓ#'E)=9B{rGh.i[ ]J+ ftE(y(D.`+"fz9#աDv,\_̲c+Œ >X+l)tEhe LWGHW# @B5uS3Ϩ(HUnw>7Pruͦ۰x~2lu_ְ J>[ V$ec5>DiWʺm3PqZz}9zF{h^#7{99;'@v>$KkZ 4gAԷVo-{j*T}zy7'9OGNN2W%5z!0/Չm?p꣖2JgF_k6|ZjtdE[|vys\Vo{=㻂6r9{gTQ}93a7cvm#բpA4qOu9aR8t"]?]{V:N8=껡vC9dt>꥔ʺrF_ ]btE(`:BRR{SuE}9tEp,6*F~7J2]#]YNVRVψcE;݇oazꞋx/0wuFd7{vB{vC)n8FȨ-5uL"ZoNWrh;2]}Je]W":9t"3]!]9 "ACW7R h<]J㘮ѕ z>u"箎ⳞP"vFBWֺt #]Em,)r+t)tRšLtz}*x{h= ]ݝЪCKvC9H]iOz `#ʡ+++B{(j7:0]!])|Q ]CJ#PLWHW:e+/Qz$؆z)0;%ۅ`}.b+{'{Jh,)8)ĝZv;؂Iܩ*r9ݟQڔNCkZ=9Bix=9c,j "CW7R0ttYn "e=닙''qP:JS ]\+K+B{(n(=/#]yd +le1tEp]1 "Q0]!]B(iP ]\K+BPFtutmpDW8 ^RރPjߓdqeZPė=\Y?>9{pr.ݘ՟ꗋOR:.WA;Qej+tNkenuH![ҵu>i02M/ /듿|Xӭ݇P$}&@]뒲MmkC麕1F%+s$;|J>xw~5Y~EK>ޟX߽5/7u\]moW v߯|"nJ Jipe`i>h 7?__ }Wnv;B;;N7^Gח-bnoVgJϯ&TOJ ڹtfj&upCs~ߞ?,Oxg+ד@ZoiT2IUjUk׈D#J6pd/+mƢME۞G -?ttuK]_޾᣷w z~Ϗ}?M2 zt'ħhW -C%)7vnMJդ΂1N%cR:ڨcRgWlMCm;!78'蘛NI3q@i6<7Ks**]9(ƍRimSxa;cgL猱e !4k;/tázYo?K[M]/N(JBla~#D!<[38{[*K榒އڛrR.UϮFK# ZA$=z:YѼ܎G롱I)Yѿ|> Oo^BTNҾ])$ƣoQH|\?rd| Jh9]l6pz[5"/-<#_%D),ޡ{FϤnP)c%F'긽,'ӧ'Eߋby&U"fKƅV6UcDy%SK2u2]`{ηW:o thg!~r_3i'pu>d MZRj+RxtZ!,vkTbLShEצK* M)2K 9ٻ6r%WqHǼ_ <AEȒֲ ߷ؒlqKܲ;v#p,l6YUE"@i 9?`d; g7\>!@֋G;"zkUʁϺ6l,:1o||3nLFS-aВI &d L31;LYYdc~Z)C\Q2l4Zxh$Y`\2\y!HsNT lDQv6݈Iv LeidYJĚtJr[ij.? riw~(4Rݲmkrn10e0>&E̊ A;箎~y< O[!UHl),Œ:JɼI#'Fv_] )LqS6cJxᒵI)K:cB :Ξ|տ{ff-Ja eUgK}`Z j@,٠.0@,RP!qB i9xBkr6pZ& !a j`' v ?n=wQ,9 JCI x಄'%iH g0N<]^)6pe8M`hb6UV ]dy? 
ANYzay\?ly(Wk*jr}[Y%rq2_+Gc5߯Y `UV>䭬T:ŐK{~ȊJËx.z?݃D QkxA" )K!XL)TpQEtc=B1JU:%wbH%LJ$p>2w ;npPEX.>7JG~ܻj8cn !UtVRzrFו-;VcvDV;,GXhQm#FsGJpPhEzC= ?ayOJrBkDIz`5K6֩`Vei")3(ʑ8hS An*a"u6=R>I# @Y*\eI? dz=6M?Z?ۻ)>"Z/&[gN#L.xx*9Cv1R1"_"TD@UV0NxHKAt#xg8V3H&yr "UKJd4x a6&MuۻFf;tuѼOqz^}Zt˥_~O#{+YbnVr)kбV.9\!Ϸ嗖{'t<q|5IXEw6.˜y2SkߝCW-}G6#W0fcEow7Y$&"JD|} o)no=nf5Rh $mtLv# թyƁGzLvΕc(ICdMs&O5t+ֳ1LRmlc0Nz,&Q+oч:&3ɤߙ-;RFAc>X)%J: ~t泴`Aʸ,$ڈ!y}qR67F7y: Jsfb^j(|T ^P6H̙`bVUlW Ե X̯)^ln[lW^R-:]WXl?EYکHʩ&d%5hV,d[zL+_=,գ⫪#+5ӇT 2r dU> :EQ$л.!k"tpfB"R"P*Kdp>\&IhoI3TVcΆay#M{E`2YSlʂZVWSwD-ݗK^m,FpEJ}@}%!йRvAsb Vf2rʡO˹@^R8bJ]kN_;w*|E0bukVu6;@e畎Y-qDbd.`mbpdžpi$Ks3> }vd3P.Yά7*;B$oi$ z=sTӨ})%b$&E Xd?EjјhHl' HOH;}㹙I*CRbD 1h^$&c\jU2&ʁFyq{GfyDךLz>$H/͠9j4HR*Bcd1d3繉kf67");#)6c\Yz[|t?<$ksXm|юђ}/dk5JZ Dv|:={;>LY5s&nϑ@i  `g|(bdf%摦30GBB+p;} g7[Wr"='W 03J hVǘ<*AR?i '}1vdp F@2L!"sK ޑQIHGAw]UN3m]exܦ L_Ȥ^^$[w7=(櫾v IY`VKG&. AHGШ L95Q1 6*xG;PIǻ^- Äx'7sml^t FYY-cIO،՗K0I'سFI^ ʊeR!)[=IКgX=]]UwU}!;7%s7u%&=%wTiy!w):ЯcؘH;'<0"ĸYd@rP5#luD幉I8oVkKqo&V|])rbߤ`JʒYԞG> 聯ZAӥpg}SMjeu=n۷B ^^+סY~ٸm[jm_l{HHKiahHkmP6~܌W[)+d˴m 9x)\K- ltB|=_DTH?32k,7ڔ7%\dxm &T)@6q{EM~* w ~5"}#8O\#wv={IiJ0tҼh%#xAoiyh.y.k,]lg/fѳo}ne{'מѵM3ttݥvoF{.%]] e^ ^>j~n f1cKh@-f|tz<̌ ~]2ހ~z 9{> m|]oȑ <qߞ7e8wחtl_L9eKs3{?ꏛ?&aPQ&niz!Fea/:/e vN-)oE)& sJ]B)Ϝ?T %s0VrMJ.%z1KoF4ޡt$6^^n5qiTSڑ+7{ &.IaBHB@*(!@^DdD16us0\ Vbr"$1B*i-pU0*Oj=Qyq1yi\8]y3ϵԙdǶLɖefA%r#$lԲLeRUc^Z%Vh:#S9R;\I7I[6x]HMU˨1fULO 6!y]6gp&拉\dj[jX cժBl mo v)^dfv=%;q/֠qWnRYr7 PR)pHe9X, ÆkD !@_X=%I) iM&ߎY0R&e+[jmhW\̮Xj}HԠLg,8IM$`fcN4 2f *!s /:dĂIؠ 8":*jm[~7Q cW,b58"ΉYC{d|zE:7L"Mۜ^H[!9Bl6Y$MED7N $Ǭ 9dg|BIs# N]FEczwYEG\Hږ]xCq,tѥwiś?2Ls x2&ёi֧+ v; R9sl;C\!Ӊ'#8A "sF i#,j˨5H@PR\[μ6"9+sae\]M= ޅj(5ry1`T $H'8ޗʾ^ C2P[yHF1U쭧?!/$ل*8K&' 89iB-MX3j=7⬯`GiVqydד.(yC٧y޳|$ɀc-ƆE,F<I $zSǭ:yS/WhoU?tGJֵ oqhO0Ouzz/A;E>Ub" e(%HuL[:Y }BGLuF5s!.APqn9_Wvr`iW)ȁLX2~%>C٘۵=o=5{V؀#&ɩR䍓"62dR4Rc fqޮEqJ8׏O&V˶$lߤ` $SX@r<_ΠW/?|/.zRK5YU4`cfY%Or&@gtuA+yדd~0IX2VʊYF'q4hdh4Tc!ݬy/)6cl~#G`ZO*v6NM&cZt }ޤ0k_7@Ĕ N|' Z7^q?篣D0a-gfجD4 ʿ{EsV~ _O׾v:#ۖcNx#%ji|t[!w6Lhwš+W\ݏ.O}kI{/Un||EBjnfw?^prCmv]^o} lz?{m>4+-wCIeݛ}G*6=Lu4~K\tt}-Ьy̱HRڢ+wi7Pir7pE-AY6M;ۆ/߮댣G^(Ij2 Of0MgM#+4G5&^(F|nhۻo#ZoQ &nR}/dz|*GX`4L[߃?R=~]eN%ԕ J`B85ߝ]VHR_~^ymڀ'YFq2̟$bT?vH)t?aF2WE`s:DE\{2护w\)4W`sUTUdURS4Wh@]˰6p0rIQK y9 ٧qW^xrƞӓݧ_\7RuO~/&Igd!^Hlvܗ1+߹+*R(VĠр qa\Wrtdd1łE2X) <{:PH{m]$a_erQ=7~_/p4^ۘ6C!ӴQԨ[)5RJ߀Cݠ @_S޹Xu.H˻ɦ3[ԁ~BĽsZ-2lɀ߂/l2gVGTqmy)lJ:^ tpep0A!ߤaYߓQ-|0X^rkO] ll~fr^^+.ӗ4-Eڵ/}Qͼ#'R`ѐM2-P9urc Xas KV9$cKq›d{UW.r$6Bthm&\1$=Ft\A2DFc:Xs `VY7Ck=w}ll\o֮w_*c.Uzq89kOD}bsB7 y<@6AJuq\_t6C6J2)$,Z}fQaT%97{LRrFE 5UK2JJD"Jy b3d `{ڐ YT.XMK2n,W@^Sٳ8Ҥ)X]H#య`HgsCGRH!z!3uݜO6 ]*X`L7Sh"s=|`s$7$O9DxP^x" )J  ^Mvox^8#sJ}cSחԟuwY!ʒ*0ȀetPHRBi5U 1F(\{G0 'yBy *&X0]#eO!$Wd5(׬a!F0)c K1R(Rh;ύ R Rd܅pĹ0 }]C9^ ̈́|xG=yyܖb;ݬ%*E_SÛ-4vu|A+mHO6Иݵgv]c l["/6Ejx67x EQIݪnUɬ"HOy"pa" ҧ"Wu(%vQP8mA:+7ƤL3T2PhAߧnW_]~*.|j69 Eh}V:Z;|k < +ƹw{{6,4XW7T/rK|s?BZU?7HX[aZGs ?quڅ||_s%\AU烫]nΐ/͡O⅚_oઙYȇ?U~:E;W;̋ͼmL| 9!܏XEw?oA޼>2%Mڸjoj& *jFj>i+}9> ~Nߢ=7co5fnX]׈!xnx;ik|fя8S|Hl]B!|l/qڱK\MQ+|`ϑg T0mΏbi.jDggPW_-"EJVo=s#(B1C/̮@D<#A.!0kIT4(Jrv3%XA,2S=#FPw1޸&;n*]^5%@yLvAuG+}(>kk5ݹ!bl{7 E/Ze4%VAmuSR @JϤ>L&5ǝIͱeRѼ-3L$&I"@ltH1 B9%.ȃRr3R*"𘰿`|XM{3z:wBR:*|KF*ps-gpi7`8 Ba[ 謺O@_ý^S>vEOԴ,^cũᨕrĩ:Ix) 8eYc8er8ZS \h|p;cp`8nT!2i5.xX), ۤX.kLQ84eA7-N/"2}No>ŖrmF㢳rH@Ч.HʜN GF$Xmȍ6,K,q ĥ/x` vn,ۋ[a(ZE m^'m[)(P2K~Tx=TB2Pg.%1-DkioANtL)VAD\$ƢGY!gK,MR(Hǵ,s~.#hʰ.(4,) ғ Rd1x0M㈄IUd +x!iFE.J%VR.Z$H!K4驅i0fon41 ӑGTZ)3<i y82b,U^X=/*OIITHJ[8=qM=~wVfgXll8^hLJk߳:@j-҆:p4t.){з{;>Dh$1*ݞ<1S#5x7ϑFYec(!E 
lTE>OPAP9-rm1A!z|T,gWjˉ?Yp2L'A+hXr₍@ip@8#M6AzI9IA@ExɸC"YPi5F?}vzUe$Q>8OM3L`Sq!-3{!Y$wsi/?SR\~K1R%7d(s&SH@!^"g^ƎH&c>kkԻh(ע;ΗrrejT(a?E+4ܝ;{O{-У\!Kܗx9|xv˷&I|<G2x`Qi ECHYJe)Y]#g W-S|@(J6R|5akoȭ%p',EPlND[FrT$LgIyp($qҡ&b/ 8ɝx٘p4Y9co>M>4ɳFJo{o.YKwNyokft|l,2jH.p%s%2Yj].@ih:*7 Q qTnjFet !9G\LH2T=d Br,@ &%:"vR(*Қ9;Uz(B[X^{l- 4f@釩 ~;pp0};I)s<5}c+c4SP6S eId(Ú*kUYٸnxx 0s,j9S"hfN툱 UDŽf)svQXǢqDZhmkނZQƢqfh"WFVPM4-f&%X< l$2^ DўG&Dx#(<.\Aua1rׇ _Mdx,v4'{b0D^* [MZW|clKA-)tb&WT#ZC QZ 6RpjҔ-Xqy#j49_:9;]D|ty|Ÿd_H E^/nEe`r!,5Z@њDHA) MZBUDJ{kzq/x,wya}FT2*x٬;?m]nFik?O6 a쁣fs6\ Z 5I4ga/3̑ sD:0=i9@&np.I^Pis5@u )K:/+K}t[deŋi~Br{!yEh,тEv]`܂mF)!ջ֔ne k0m t T6+'qhz[ xn} BZ0еBmTh4M#fh^v1PzsG;> 0ks?BLd-s@ jp_A6 Z$,,hqdPVi&E*-://cdKevrٵ.*M8y6-?F'/W#p=@t#K=Je8u9"r+i1nS kOgŠϧ>PG7GMt9.si6G.Oޚy5ɻf^&g[.ɻMoTF)F'LL$@4@Tkk;*W'D5( yBd"k1"fhHo\,=ף{^thS`x$I&u֥IMK,-B0WGn5ԋRZ8g4h ߝbƂCR͌e$6ZT0g[࢚8USk+'8/F*ZTpK) 4(JhԆDSwnSZMY,wU.Ҍjjp=f#Z:f#Pw: x Q3fWDoW`Mp%_'SQm<u;qE 1=sZ}48tg#|phЕ~)Z2\]dY(''NQkS5T88\+~-Q*J !c<+ % 45XG NYd$ǿ_NgjA,"( Bp q۸@uGmz6q-ji'Sl癠Чq;Ngﺹ K+kui',`dd\.m x{rI4;p{B4 E]ƕ By1$< #s[CA]vqOg=KGf<"G養>PӠ8EYEݮ8{$)!Gr.HheQUbg1 d@gz\wnu/D< N8w>Sv(`%YEb$,.(srmeJfV׾Ed2aGt NC(w=󵳞ŭ?n C,%S<.cGt,O;NF׎nc4,-s]}MIwut.˾ۏqInz]nY{zE7>EоBnmZzw^&rY実-h?!ms62^Mnj_8G7]^֧$x%o~wۋw|cǝWܾ&->k7z7:'9ިjKs1ťv{͓fΫY#A0{Ca+i\aVQIibNe7J1.rHbBLz%<km{?hC{{tzͰ6iW}_\I39 <mzAMڋڼ&E\K_JvF0uMڋ5i36iTQ-!N;Ngﺣ$8/іB;mdKkmH!8&6`75j[kTHI73D=(RSI2驪[q"PA8d[FlK*Q01YΥf%eܫIY3"ʠ@Y8˼f9 }-q>qR8򐸎>rdHNCRt 9?`*se͵tn!)L:?X)nks.dՁ65~[fN{=VQ]zu3^+ʺb”g֪f<:@\V mIDBbɱZmҩA:5Ref 6[` (*lJC@J9W%uHԱn Qj'IRN|T=Y9 9yTZӹai:jdZ\h_:kL`4g$c2j}LMI1-h+:Ѣ8>m+/ɩb% 5(2Ge] LeYRK@FV=ܗcGP{diur8M&b kBsFȬB9Yo$|8RBD$Ǚ!~$!iښnHHL*^Zc2U1& j.Fcq!$óeVd1}[C3&TVJ!#RCPd9kު 9aRscLL{E*l0K}yI@ۇ% 'I.5cKTokf6P)u/Krk碽<Grm95k lGhIm`OmHYQ6=خcG&Y3g2 ɝI1 JVwǸY*ӂGSG<#YdVڒ6!^Zs1R9y ̑s{ךo^&{@-̃T$UZ#is%8Ozыdp &Ľg̤2-2'ue#H;R*j1KySDd L;Dpz\X@Ri# -[s櫡{;j* jHE)lBezY+&hwȠ8vjӹA$sv.-.!fENrEvЈ/S}h5={gcVa^Buwf?Awn#CW(Mb7ͷesv<:ZPw#dMV8_?]y{tWSDmʺz]k "dƀM&wG/ӷ_ILo8/?|}M"w7t&2{FkGlaN{"}>-{:|.L4Ci{ٰ.>!u~ۇB oZ5 z4V'~M4 0Rk-_.d!8<<ħ_᭎ޡ“v׏Yec@"Iyښ+xv)5>.Ay*ЈnazÔ>t4/wͅUXQ/}}9F?xɰ?tDJ6{= ,gE&L:ruEzM6._R ]|),R"M! ~hg97- _W'C^z|)B3~1iD3[2/'Mpsw{~IOc#ŖMaaoy K5t=\-ҵ_M\={Kip)h!eQ u=yWuB.Fc͘wyŲ,z2qE͞+nR7.ŴS䲦I԰rVZq+9 \FWFuG!+s b#2@G>cNy#:+ 1ZcÊEUrUxgm7Iƥ(X6C@`"$}b9AG,oLd7s杗,ǐ:BP`AFl'V]r+˪G+}NQ*iIcCeM{('YK۲o bBi^lΚ2|:EՋ]k{X(RgЏ?ӄ%[ᇟH/xr~mb԰Jd,{U鏇 4rΥ@T۲L$]>QeZ-%(|$i$!CCfootE>Y+=;GT'V>m^?ugF_|wyηE69ڬv2ԦVləs߿ z+f֑6*)5 KQoSwS@os;tw~ʙ1[=͞94e)[gRƖy!r !\h< #8 򳾥VVs6| 6ÿ=F~YfSkD 2цYЌ(0e:[k$$SF  .ېxWޢ|*A[r سomsŞx o$~4;-Xä}_ZTPl֨m%[,o-G6b<+K98CN !'lHR2s!JRd/ $}`t`M29 |1)(.CW=W \!ha*jӹCJ=$T<[Ԩ=hx]p!Ⱥx 4S;N"ԩq;HN>}Ls/x(RLي,r"aN|?.qKM <󐸊.r.<w!BdkzK%ݥ?W|H /8Mn\ղ_)g4/bJ&U޵1GDuE!ݴݼJ`]7IL_iJ6͎|/suFS #&}chon~:uُQ"[r&2i29115 ^D駑R_k*񙖍ϬoPG`1J &($КI;lJ)r 57れՇK5 :laclNkmoqH o;utkIC$(d'ܖlb/DO6Y?2Zb񢴐B3@5XE2}CΩI{%s< %)׼BiР3/ %ĩɽ"gpT Kk9 B̸ElЧ<f6QfoR߼-x0kM!%6^}6*ݰ[#I{ii-x+F횴ۻ&Ǟ=;N x4vxU ?+Dk l:j$dhȢR팂:Z+>9^$!$In.:j%T'ٸV0>,u)t2v}}G`X).dG;h6m <] qin ]Q_NF7G+c<8sѫ7pm~lޏ?'-(H5=^!r`߹Gd. 
~[vI\Q.T Z>;4zN:(hO_~ۧt?q뽤}*9RX s*9w\Ǐ !Y :icQӵYf :7;(ncPܸl*kΦhָ"|PސRYA46T*0jP\*ŅG#'' s.P4e} yZ00YDAĹ;(~)(n o[/fN(~.%>}C-Jx8͓`=ue4JM|Z: 7뢸b;TТSi۽/- ߡ߃uF*Z:gQRtqi" uۭ#2i FT\$!e_ tiAdo%td\5q&׍͂t6k3)zma,q/~!9ZoOoڕث-_EBhSEe7ʊ`kyb:kTE* ) F(@|xEz`x>BvEǂÉEޢJ2H-计g fWK&h*0pE\XI0z۸d S @  (E}l;ך8;,Q )BgK!ZD|<١w}Dm:-5z_p< SÃC⋍ER5'|gS:(4C LfCVGC }b OUYVީPt +KPhS / ]qDz|mO|GOz_ %Lkf.5P+2,I7hItJ;nyq6G[zo[*b6)j۳āt!dK| '^ &-~@"0~߫n*^kp&dঀy +U.dFx ƹp5RotsBsBŶt9}攅|>\hqm;2GSf^u- (g}[}]m=ݏ}4WK<|Ǘ7rU᭵)ѧ8UtҊe2O9<9Uތ_*-O??IUܮu7߰W^)'j2oisf"Z.WEsԼt MKQw*zƿ1&x8ZY|8_UlQ}:;Q:k06D[.lb-/A٬frJ>[ azWˠ>$)I0{̍+Xs݆ӑhe9ջJۥ^YC\&8:ӂs^V=.sڶȧxƶSO\?㰟pfϕ'ڰgG~;^܈Ϣ2 'ėVxm-ǬMcƤ My|B\: ׍=^RB`XOz,sO 8~fJ#iȽJ4=g(z\5pG< Wr_@N1e:Ł+o3V._}GaǼ:tnkWY$ĩ`uz_<`8&f#Ηjb!ȅh2N޺n8cá&!ڈ }#$;J>,X[QxK q#"mG$o ݗ䂠Hc%HN+H5:R+IBuH~d4bP"ygޞ<~=Z⸊>'.biiHޣvj-sa] qjU{=U ﵖURMUl6N!% * Z% ;7s!j1n:ϺE Yw&msGm* UBcSPL1J3]u}]UE˂7Oy]wo:-/c6kG&c]ۿofxkqECxVH_(P !4pڤS*$ tX؂\.ܷ)X[voD&5MFmֈeA $\Ӏ`pT}Jw] }u*>>D2(] նM<Vcazt?/߷j5=kz"1")0-Ṷ>GV܈@GÔ5`*2U.iV'W{˾\JD!g+J&0=uq7|>;ϴc8UXc-~N;>r~oE BZx@e( @+|n(lթxBo2x|TYB 'UVBDT<ɢΥfq,3(F”Fw}ݖs?ЫUƓ  ZU'k<*BZ!E$S[{v@' %Cř ̚2MXȗR4BNL T]c*P*gZ|(Q>~N̊\)XɤhBK6U>RҚ(*.T]с҇O{ўr{T{^;@*8B\kBJ&Zo+)Uv"jHliֱ ؂sK*V_tm`|p 7/OgOxیvI/ .mH%:B %k\|K!1n yJ#u)&p_#0:AZgeS&]!qFB@֒mtI†RL;1ߧ6m۸/_-WOZ Gyltk>;lq _kyȣzw?gvc~wc{ZN.n]u7b_OOlV{nq#lgLHZ!CZ>>KgёnwuY\¡gR&3Pǂ>9`-baIM }T^pƤRbZJwm4s-3ꃵ>/fn'=A;}WkŎeÍSwv=P﹠^tNmz~ŏar cEioI-kc#jWv7OJ\Vym-8ڃ/sя_.ly->߿Z, 4U}EOBb\W +k2]jz6قo9W_ݫ?|0"$N!<9vOwmr?VyXr/Y4۰]}47s=Z]&w9z7~-_po]u*>b::iOw?~M)Wgu2 hV"rP];ߏӥ\8+#T|Ivjmk! 8:R#ϭ8V̜foӟmkxun4pzNPdCa-T1@TrJ$PRT] @Y*97,fk9%ݵg=fyeno9Lᘠo\*+`MD[o~A,eCLAΊ˱LlSi&^ 0  v3ZwDO@!;dR"kХƹת#_|-F[,]ac`&f憎*$!,xjESM@Jac<0N=ND1(PX栌#$r@5א9]l%gJ>mkV3A~vTL֙m% hOT̝rWTm hivyر)Z}hؑnGj:w{ɋq#+ AZ?>Lj'. UԘZ'D^QGRsRtH B PK)c,9fY0d qZ ,i,U1R-cZf8[]͉[_2_cL&[#BZFC5,)uR>X,h>h( ;JѲgpY(h[agm9fyV):IɾvQwz]KJJܼ%QLQ cJ.4*mWִ91ŽTa7Bg{FLضk-//#4qnfݭVicg~&pͻy l]ǔ)XA.sq4kHlp*s[=\yءMUu|X$ۚP9sy-x[ٚ`]\ATnk| Y5LXǢ|V*-&*QSUPZnG}As!ʿY_C Ɣzҡ22m0P*d\nsAS Ee%aZCo$8O\&`&s)3Q^̬[QxׁN2APk X0#d#yV] ͪsQZo3djlP|ĘH!@v몋EbA(P$1+<5ǔLhj}⒬:R]2jj[٘JuPbQdkU$ͥ(}U N(A%r(5ݖ 2Y/]Y=lL#¨R0Z\{%ձ̊GdXh4*90+!eEzL!|S^ej|Q7)K B6ma5uz]_ B&"`4|~,ngz$e,/dgh3s`ysF`;+l~}s!' 8T"x5z 0e\4JO+(5J{ms+`FSK16hɩ7Nl[moBi**n}dYW$ [o &`t(`**@\%zL[JE:~8ZE[==/Fr7.^)>J=|ӂ  1 $s€X3l*|}Zkm}/`&/cY\A{ R\iGbwƗ}~[ 'U•dYFgYIt,68٬b\ĿGl_#Z ΆkcWluy]^ڋ`ov;v{jIL0p8(Mr);r&끗;<L[jKEN\ccA//lyEܝuz~AT>w=ޏ|u_yy<,̫{~? \_ S>}ifXV4791=/pLn\/OjU3aH3EڌQ~-Pwm9hx7n8j aE*UjtH\#qOVZG#t[[9@koT6UV5w g^Ngo'֋Ǜ}G=0>./^ιنt[1zH[@z,m$]Hwmm4qۼ_ a' Lah#KZKeeKdb &Zlv]UYr SK1-ɑU vh4N) 6ɝQ6`#*$;A UtLhs/'=.Qyy}ҕ\WtyqXy")Vh_'j%bFX)VNL_}calhX9/I4#-_?^?d޿<.)xtQ9&9=GIPdܒ\Mzb`llM$W Rb3/k6%}Ϝ@/,OW &X]O.GxѸzqA߼&n1EfXTgT؄QY:fϽUk ?R(hSO޼zqUcgmI 9Zr;xrI S>C=U)tʮ+nqPn{fʲ &uD9hkc haLnf.f!ӷc`>T09sH[ 8SbTbeP&.{%O.xrZIwݓKT*yrOГc{W`\U]D%Q\=AM W@0#to*KUvksJT6:z:p% SGpc]%rl%ju*"\IA9'{W`zo+V pļ'WJLXW\!j \iاv !7p{3ed*Q;IU 4Iq8%X|!zQq|v3Z) |9Dy 1$re!2 .HڇYczۀ5m,t`*{{ n~-/d~آ:WLbiVA8inLJEh*8' >r %e&L \h ]Qs>potJ]v]|5.8qڼ/szvU6MA(0cLEJIЦT_cdedJmȒSr"XRBohj K,aOVȍg>'}< 5SQJqZ[cJy SQݿm!> q@=[E^<\a+䵗Vd=u0cbKճfW/R-*Nc*7]*2m7U"]%8rDE+. 
var/home/core/zuul-output/logs/kubelet.log0000644000000000000000003535302115134432107017677 0ustar rootroot
Jan 22 14:03:03 crc systemd[1]: Starting Kubernetes Kubelet...
Jan 22 14:03:04 crc restorecon[4693]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c84,c419 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 22 
14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 22 14:03:04 crc 
restorecon[4693]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 22 14:03:04 crc 
restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 
Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 
crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 
Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 22 
14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 
14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 
22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 
Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 
22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 22 
14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 22 14:03:04 crc restorecon[4693]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 22 14:03:04 crc restorecon[4693]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 22 14:03:04 crc restorecon[4693]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Jan 22 14:03:05 crc kubenswrapper[5017]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 22 14:03:05 crc kubenswrapper[5017]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Jan 22 14:03:05 crc kubenswrapper[5017]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. 
See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 22 14:03:05 crc kubenswrapper[5017]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 22 14:03:05 crc kubenswrapper[5017]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Jan 22 14:03:05 crc kubenswrapper[5017]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.110220 5017 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112777 5017 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112792 5017 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112797 5017 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112801 5017 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112805 5017 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112809 5017 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112813 5017 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112817 5017 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112821 5017 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112825 5017 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112830 5017 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112834 5017 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112839 5017 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112844 5017 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112847 5017 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112851 5017 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112855 5017 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112859 5017 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112863 5017 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112867 5017 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112870 5017 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112874 5017 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112878 5017 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112882 5017 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112886 5017 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112893 5017 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112896 5017 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112901 5017 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112905 5017 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112908 5017 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112912 5017 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112915 5017 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112919 5017 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112922 5017 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112926 5017 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112930 5017 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112933 5017 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112937 5017 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112941 5017 feature_gate.go:330] 
unrecognized feature gate: VSphereDriverConfiguration Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112945 5017 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112949 5017 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112953 5017 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112956 5017 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112960 5017 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112963 5017 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112967 5017 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112972 5017 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112976 5017 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112980 5017 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112984 5017 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112988 5017 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112991 5017 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112995 5017 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.112999 5017 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.113002 5017 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.113006 5017 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.113011 5017 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.113015 5017 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.113019 5017 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.113024 5017 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.113028 5017 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.113033 5017 feature_gate.go:330] unrecognized feature gate: Example Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.113038 5017 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.113042 5017 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.113047 5017 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.113051 5017 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.113055 5017 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.113058 5017 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.113062 5017 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.113067 5017 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.113070 5017 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113384 5017 flags.go:64] FLAG: --address="0.0.0.0" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113399 5017 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113412 5017 flags.go:64] FLAG: --anonymous-auth="true" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113419 5017 flags.go:64] FLAG: --application-metrics-count-limit="100" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113426 5017 flags.go:64] FLAG: --authentication-token-webhook="false" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113431 5017 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113438 5017 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113444 5017 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113450 5017 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113455 5017 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113460 5017 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113465 5017 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113470 5017 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113475 5017 flags.go:64] FLAG: --cgroup-root="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113479 5017 flags.go:64] FLAG: --cgroups-per-qos="true" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113483 5017 flags.go:64] FLAG: --client-ca-file="" Jan 22 14:03:05 
crc kubenswrapper[5017]: I0122 14:03:05.113487 5017 flags.go:64] FLAG: --cloud-config="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113491 5017 flags.go:64] FLAG: --cloud-provider="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113495 5017 flags.go:64] FLAG: --cluster-dns="[]" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113501 5017 flags.go:64] FLAG: --cluster-domain="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113504 5017 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113509 5017 flags.go:64] FLAG: --config-dir="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113512 5017 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113517 5017 flags.go:64] FLAG: --container-log-max-files="5" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113523 5017 flags.go:64] FLAG: --container-log-max-size="10Mi" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113527 5017 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113531 5017 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113535 5017 flags.go:64] FLAG: --containerd-namespace="k8s.io" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113540 5017 flags.go:64] FLAG: --contention-profiling="false" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113544 5017 flags.go:64] FLAG: --cpu-cfs-quota="true" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113547 5017 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113551 5017 flags.go:64] FLAG: --cpu-manager-policy="none" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113558 5017 flags.go:64] FLAG: --cpu-manager-policy-options="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113563 5017 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113568 5017 flags.go:64] FLAG: --enable-controller-attach-detach="true" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113572 5017 flags.go:64] FLAG: --enable-debugging-handlers="true" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113576 5017 flags.go:64] FLAG: --enable-load-reader="false" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113580 5017 flags.go:64] FLAG: --enable-server="true" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113584 5017 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113591 5017 flags.go:64] FLAG: --event-burst="100" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113595 5017 flags.go:64] FLAG: --event-qps="50" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113599 5017 flags.go:64] FLAG: --event-storage-age-limit="default=0" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113603 5017 flags.go:64] FLAG: --event-storage-event-limit="default=0" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113607 5017 flags.go:64] FLAG: --eviction-hard="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113613 5017 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113617 5017 flags.go:64] FLAG: --eviction-minimum-reclaim="" Jan 22 14:03:05 crc 
kubenswrapper[5017]: I0122 14:03:05.113621 5017 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113625 5017 flags.go:64] FLAG: --eviction-soft="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113629 5017 flags.go:64] FLAG: --eviction-soft-grace-period="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113634 5017 flags.go:64] FLAG: --exit-on-lock-contention="false" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113638 5017 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113642 5017 flags.go:64] FLAG: --experimental-mounter-path="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113646 5017 flags.go:64] FLAG: --fail-cgroupv1="false" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113650 5017 flags.go:64] FLAG: --fail-swap-on="true" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113654 5017 flags.go:64] FLAG: --feature-gates="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113659 5017 flags.go:64] FLAG: --file-check-frequency="20s" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113663 5017 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113667 5017 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113671 5017 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113676 5017 flags.go:64] FLAG: --healthz-port="10248" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113680 5017 flags.go:64] FLAG: --help="false" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113684 5017 flags.go:64] FLAG: --hostname-override="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113688 5017 flags.go:64] FLAG: --housekeeping-interval="10s" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113692 5017 flags.go:64] FLAG: --http-check-frequency="20s" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113696 5017 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113701 5017 flags.go:64] FLAG: --image-credential-provider-config="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113707 5017 flags.go:64] FLAG: --image-gc-high-threshold="85" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113712 5017 flags.go:64] FLAG: --image-gc-low-threshold="80" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113717 5017 flags.go:64] FLAG: --image-service-endpoint="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113722 5017 flags.go:64] FLAG: --kernel-memcg-notification="false" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113727 5017 flags.go:64] FLAG: --kube-api-burst="100" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113732 5017 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113737 5017 flags.go:64] FLAG: --kube-api-qps="50" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113740 5017 flags.go:64] FLAG: --kube-reserved="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113744 5017 flags.go:64] FLAG: --kube-reserved-cgroup="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113748 5017 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Jan 22 14:03:05 crc 
kubenswrapper[5017]: I0122 14:03:05.113752 5017 flags.go:64] FLAG: --kubelet-cgroups="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113756 5017 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113760 5017 flags.go:64] FLAG: --lock-file="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113764 5017 flags.go:64] FLAG: --log-cadvisor-usage="false" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113768 5017 flags.go:64] FLAG: --log-flush-frequency="5s" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113772 5017 flags.go:64] FLAG: --log-json-info-buffer-size="0" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113778 5017 flags.go:64] FLAG: --log-json-split-stream="false" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113782 5017 flags.go:64] FLAG: --log-text-info-buffer-size="0" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113786 5017 flags.go:64] FLAG: --log-text-split-stream="false" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113790 5017 flags.go:64] FLAG: --logging-format="text" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113794 5017 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113798 5017 flags.go:64] FLAG: --make-iptables-util-chains="true" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113802 5017 flags.go:64] FLAG: --manifest-url="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113806 5017 flags.go:64] FLAG: --manifest-url-header="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113811 5017 flags.go:64] FLAG: --max-housekeeping-interval="15s" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113815 5017 flags.go:64] FLAG: --max-open-files="1000000" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113820 5017 flags.go:64] FLAG: --max-pods="110" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113826 5017 flags.go:64] FLAG: --maximum-dead-containers="-1" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113830 5017 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113835 5017 flags.go:64] FLAG: --memory-manager-policy="None" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113840 5017 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113844 5017 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113848 5017 flags.go:64] FLAG: --node-ip="192.168.126.11" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113852 5017 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113862 5017 flags.go:64] FLAG: --node-status-max-images="50" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113867 5017 flags.go:64] FLAG: --node-status-update-frequency="10s" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113872 5017 flags.go:64] FLAG: --oom-score-adj="-999" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113876 5017 flags.go:64] FLAG: --pod-cidr="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113880 5017 flags.go:64] FLAG: 
--pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113887 5017 flags.go:64] FLAG: --pod-manifest-path="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113892 5017 flags.go:64] FLAG: --pod-max-pids="-1" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113897 5017 flags.go:64] FLAG: --pods-per-core="0" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113901 5017 flags.go:64] FLAG: --port="10250" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113905 5017 flags.go:64] FLAG: --protect-kernel-defaults="false" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113909 5017 flags.go:64] FLAG: --provider-id="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113914 5017 flags.go:64] FLAG: --qos-reserved="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113918 5017 flags.go:64] FLAG: --read-only-port="10255" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113922 5017 flags.go:64] FLAG: --register-node="true" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113925 5017 flags.go:64] FLAG: --register-schedulable="true" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113929 5017 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113937 5017 flags.go:64] FLAG: --registry-burst="10" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113941 5017 flags.go:64] FLAG: --registry-qps="5" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113945 5017 flags.go:64] FLAG: --reserved-cpus="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113949 5017 flags.go:64] FLAG: --reserved-memory="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113953 5017 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113958 5017 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113962 5017 flags.go:64] FLAG: --rotate-certificates="false" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113967 5017 flags.go:64] FLAG: --rotate-server-certificates="false" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113971 5017 flags.go:64] FLAG: --runonce="false" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113975 5017 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113979 5017 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113984 5017 flags.go:64] FLAG: --seccomp-default="false" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113987 5017 flags.go:64] FLAG: --serialize-image-pulls="true" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113992 5017 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.113996 5017 flags.go:64] FLAG: --storage-driver-db="cadvisor" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.114000 5017 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.114005 5017 flags.go:64] FLAG: --storage-driver-password="root" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.114009 5017 flags.go:64] FLAG: --storage-driver-secure="false" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 
14:03:05.114013 5017 flags.go:64] FLAG: --storage-driver-table="stats" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.114017 5017 flags.go:64] FLAG: --storage-driver-user="root" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.114021 5017 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.114025 5017 flags.go:64] FLAG: --sync-frequency="1m0s" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.114030 5017 flags.go:64] FLAG: --system-cgroups="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.114033 5017 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.114041 5017 flags.go:64] FLAG: --system-reserved-cgroup="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.114045 5017 flags.go:64] FLAG: --tls-cert-file="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.114049 5017 flags.go:64] FLAG: --tls-cipher-suites="[]" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.114053 5017 flags.go:64] FLAG: --tls-min-version="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.114057 5017 flags.go:64] FLAG: --tls-private-key-file="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.114061 5017 flags.go:64] FLAG: --topology-manager-policy="none" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.114065 5017 flags.go:64] FLAG: --topology-manager-policy-options="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.114069 5017 flags.go:64] FLAG: --topology-manager-scope="container" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.114074 5017 flags.go:64] FLAG: --v="2" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.114079 5017 flags.go:64] FLAG: --version="false" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.114084 5017 flags.go:64] FLAG: --vmodule="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.114093 5017 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.114097 5017 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114228 5017 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114237 5017 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114241 5017 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114245 5017 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114249 5017 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114253 5017 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114260 5017 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114264 5017 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114269 5017 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114273 5017 feature_gate.go:330] 
unrecognized feature gate: PinnedImages Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114277 5017 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114280 5017 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114283 5017 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114287 5017 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114291 5017 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114294 5017 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114298 5017 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114302 5017 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114306 5017 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114311 5017 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114321 5017 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114326 5017 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114332 5017 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114341 5017 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114346 5017 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114350 5017 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114355 5017 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114358 5017 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114362 5017 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114366 5017 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114369 5017 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114373 5017 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114376 5017 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114380 5017 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114383 5017 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114386 5017 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114390 5017 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114394 5017 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114401 5017 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114412 5017 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114426 5017 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114432 5017 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114437 5017 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114442 5017 feature_gate.go:330] unrecognized feature gate: Example Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114447 5017 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114451 5017 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114455 5017 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114459 5017 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114463 5017 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114467 5017 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114471 5017 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114475 5017 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114480 5017 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114484 5017 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114488 5017 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114492 5017 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114496 5017 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114501 5017 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114506 5017 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114512 5017 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114516 5017 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114519 5017 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114523 5017 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114527 5017 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114530 5017 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114534 5017 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114537 5017 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114540 5017 feature_gate.go:330] unrecognized 
feature gate: InsightsConfigAPI Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114544 5017 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114548 5017 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.114557 5017 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.114565 5017 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.127999 5017 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.128089 5017 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128304 5017 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128337 5017 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128349 5017 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128359 5017 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128369 5017 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128378 5017 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128387 5017 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128396 5017 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128405 5017 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128414 5017 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128424 5017 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128433 5017 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128442 5017 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128451 5017 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128459 5017 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128468 5017 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128477 5017 feature_gate.go:330] unrecognized 
feature gate: MachineConfigNodes Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128485 5017 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128494 5017 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128502 5017 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128511 5017 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128519 5017 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128527 5017 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128536 5017 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128544 5017 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128553 5017 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128564 5017 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128580 5017 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128590 5017 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128601 5017 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128613 5017 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128623 5017 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128632 5017 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128641 5017 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128651 5017 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128660 5017 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128668 5017 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128677 5017 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128685 5017 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128694 5017 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128705 5017 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128715 5017 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128724 5017 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128733 5017 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128743 5017 feature_gate.go:330] unrecognized feature gate: Example Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128752 5017 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128761 5017 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128770 5017 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128779 5017 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128788 5017 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128796 5017 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128804 5017 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128813 5017 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128822 5017 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128830 5017 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128839 5017 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128847 5017 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128855 5017 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128864 5017 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128873 5017 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128881 5017 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128890 5017 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128901 5017 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128913 5017 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128922 5017 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128931 5017 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128940 5017 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128951 5017 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128962 5017 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128973 5017 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.128986 5017 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.129005 5017 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129325 5017 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129347 5017 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129356 5017 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129366 5017 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129375 5017 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129384 5017 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129395 5017 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129409 5017 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129418 5017 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129428 5017 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129438 5017 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129447 5017 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129456 5017 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129465 5017 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129475 5017 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129483 5017 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129491 5017 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129500 5017 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129510 5017 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129518 5017 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129528 5017 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129537 5017 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129546 5017 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129555 5017 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129564 5017 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129573 5017 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129581 5017 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129589 5017 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129597 5017 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129606 5017 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129615 5017 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129623 5017 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129631 5017 feature_gate.go:330] unrecognized feature gate: 
MixedCPUsAllocation Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129639 5017 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129650 5017 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129658 5017 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129666 5017 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129674 5017 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129682 5017 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129691 5017 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129699 5017 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129707 5017 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129715 5017 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129724 5017 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129733 5017 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129744 5017 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129754 5017 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129764 5017 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129775 5017 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129786 5017 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129796 5017 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129806 5017 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129815 5017 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129824 5017 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129832 5017 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129840 5017 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129852 5017 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129861 5017 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129870 5017 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129880 5017 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129889 5017 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129898 5017 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129907 5017 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129919 5017 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129928 5017 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129937 5017 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129946 5017 feature_gate.go:330] unrecognized feature gate: Example Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129955 5017 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129963 5017 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129971 5017 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.129981 5017 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.129995 5017 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.130349 5017 server.go:940] "Client rotation is on, will bootstrap in background" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.135777 5017 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.135947 5017 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.136869 5017 server.go:997] "Starting client certificate rotation" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.136924 5017 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.137407 5017 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-14 18:24:41.605363953 +0000 UTC Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.137575 5017 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.146574 5017 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 22 14:03:05 crc kubenswrapper[5017]: E0122 14:03:05.146598 5017 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.129.56.235:6443: connect: connection refused" logger="UnhandledError" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.148038 5017 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.154816 5017 log.go:25] "Validated CRI v1 runtime API" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.172246 5017 log.go:25] "Validated CRI v1 image API" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.173617 5017 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.176756 5017 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-01-22-13-58-42-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.176797 5017 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.195034 5017 manager.go:217] Machine: {Timestamp:2026-01-22 14:03:05.193258299 +0000 UTC m=+0.185720177 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654132736 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84 BootID:ff9e96f4-5103-4727-9fe8-65027bb65e8c Filesystems:[{Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 
Capacity:16827068416 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730829824 Type:vfs Inodes:819200 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:8b:43:26 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:8b:43:26 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:03:de:dc Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:bf:b6:bd Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:13:5d:80 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:25:d5:c8 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:0e:3c:4e:b7:a7:81 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:02:bd:a6:33:d4:5b Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654132736 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] 
Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.195366 5017 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.195662 5017 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.197171 5017 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.197393 5017 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.197444 5017 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.197677 5017 topology_manager.go:138] "Creating topology manager with none policy" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.197689 5017 
container_manager_linux.go:303] "Creating device plugin manager" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.197958 5017 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.197997 5017 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.198203 5017 state_mem.go:36] "Initialized new in-memory state store" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.198426 5017 server.go:1245] "Using root directory" path="/var/lib/kubelet" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.199086 5017 kubelet.go:418] "Attempting to sync node with API server" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.199126 5017 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.199153 5017 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.199166 5017 kubelet.go:324] "Adding apiserver pod source" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.199180 5017 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.202186 5017 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.202307 5017 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.235:6443: connect: connection refused Jan 22 14:03:05 crc kubenswrapper[5017]: E0122 14:03:05.202395 5017 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.235:6443: connect: connection refused" logger="UnhandledError" Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.202586 5017 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.235:6443: connect: connection refused Jan 22 14:03:05 crc kubenswrapper[5017]: E0122 14:03:05.202664 5017 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.235:6443: connect: connection refused" logger="UnhandledError" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.203327 5017 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
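[editor's note] Several entries above show the kubelet's informers and CSR requests failing with "dial tcp 38.129.56.235:6443: connect: connection refused" because api-int.crc.testing:6443 is not yet serving at this point in startup. A quick way to reproduce that check outside the kubelet is a plain TCP dial against the same endpoint; the snippet below is a hypothetical diagnostic helper, not part of the kubelet, with the host and port taken from the errors above.

package main

import (
	"fmt"
	"net"
	"time"
)

// probe attempts a TCP connection to the API server endpoint the kubelet
// is failing to reach above. "connection refused" means nothing is
// listening yet on that address and port.
func probe(addr string) error {
	conn, err := net.DialTimeout("tcp", addr, 3*time.Second)
	if err != nil {
		return err
	}
	return conn.Close()
}

func main() {
	// Endpoint taken from the log errors above; adjust for other clusters.
	if err := probe("api-int.crc.testing:6443"); err != nil {
		fmt.Println("apiserver not reachable:", err)
		return
	}
	fmt.Println("apiserver TCP endpoint is reachable")
}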
Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.204949 5017 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.205507 5017 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.205536 5017 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.205546 5017 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.205555 5017 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.205569 5017 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.205577 5017 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.205584 5017 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.205597 5017 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.205609 5017 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.205618 5017 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.205631 5017 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.205640 5017 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.206174 5017 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.206700 5017 server.go:1280] "Started kubelet" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.207171 5017 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.207173 5017 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.207265 5017 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.235:6443: connect: connection refused Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.207670 5017 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.208387 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.208415 5017 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Jan 22 14:03:05 crc systemd[1]: Started Kubernetes Kubelet. 
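[editor's note] The surrounding entries show rotation being enabled for the kubelet client and serving certificates, along with their expiration and rotation-deadline timestamps. To cross-check those dates against the on-disk certificate files named by certificate_store.go above, a small inspection sketch like the one below can decode the PEM and print its validity window; the path is the one from the log, and this is an illustrative helper rather than anything the kubelet itself runs.

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
)

func main() {
	// Certificate path as logged by certificate_store.go above.
	data, err := os.ReadFile("/var/lib/kubelet/pki/kubelet-server-current.pem")
	if err != nil {
		fmt.Println("read:", err)
		return
	}
	// The file holds certificate and key blocks; inspect only CERTIFICATE blocks.
	for block, rest := pem.Decode(data); block != nil; block, rest = pem.Decode(rest) {
		if block.Type != "CERTIFICATE" {
			continue
		}
		cert, err := x509.ParseCertificate(block.Bytes)
		if err != nil {
			fmt.Println("parse:", err)
			continue
		}
		fmt.Printf("subject=%s notBefore=%s notAfter=%s\n",
			cert.Subject.CommonName, cert.NotBefore, cert.NotAfter)
	}
}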
Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.208761 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 07:47:03.559283178 +0000 UTC Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.209078 5017 volume_manager.go:287] "The desired_state_of_world populator starts" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.209092 5017 volume_manager.go:289] "Starting Kubelet Volume Manager" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.209205 5017 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Jan 22 14:03:05 crc kubenswrapper[5017]: E0122 14:03:05.209247 5017 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.210375 5017 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.235:6443: connect: connection refused Jan 22 14:03:05 crc kubenswrapper[5017]: E0122 14:03:05.210426 5017 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.235:6443: connect: connection refused" logger="UnhandledError" Jan 22 14:03:05 crc kubenswrapper[5017]: E0122 14:03:05.209751 5017 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.129.56.235:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188d1283a2bb2137 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-22 14:03:05.206661431 +0000 UTC m=+0.199123289,LastTimestamp:2026-01-22 14:03:05.206661431 +0000 UTC m=+0.199123289,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 22 14:03:05 crc kubenswrapper[5017]: E0122 14:03:05.210587 5017 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.235:6443: connect: connection refused" interval="200ms" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.212379 5017 server.go:460] "Adding debug handlers to kubelet server" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.216317 5017 factory.go:153] Registering CRI-O factory Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.216366 5017 factory.go:221] Registration of the crio container factory successfully Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.216449 5017 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.216461 5017 factory.go:55] Registering systemd factory Jan 22 14:03:05 crc 
kubenswrapper[5017]: I0122 14:03:05.216469 5017 factory.go:221] Registration of the systemd container factory successfully Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.216494 5017 factory.go:103] Registering Raw factory Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.216511 5017 manager.go:1196] Started watching for new ooms in manager Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.217242 5017 manager.go:319] Starting recovery of all containers Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.223426 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.223703 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.223796 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.223892 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.223987 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.224075 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.224184 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.224275 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.224364 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.224452 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.224538 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.224620 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.224695 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.224810 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.224899 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.224979 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.225092 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.225275 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.225365 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.225466 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.225554 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.225638 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.225712 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.225771 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.225833 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.225898 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.225962 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.226042 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.226143 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.226222 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.226328 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.226409 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" 
volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.226515 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.226590 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.226676 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.226764 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.226844 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.226928 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.227014 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.227097 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.227216 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.227310 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.227411 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" 
volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.227496 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.227581 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.227666 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.227765 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.227849 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.227940 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.228018 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.228095 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.228215 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.228313 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.228396 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" 
volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.228480 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.228593 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.228713 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.228815 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.228903 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.228982 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.229066 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.229172 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.229278 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.229364 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.229451 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.229538 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.229621 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.229714 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.229801 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.229878 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.229963 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.230047 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.230154 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.230255 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.230348 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.230432 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" 
volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.230525 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.230617 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.230714 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233288 5017 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233336 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233356 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233370 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233383 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233394 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233406 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233422 5017 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233454 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233474 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233487 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233499 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233512 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233525 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233536 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233548 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233560 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233570 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233584 5017 reconstruct.go:130] "Volume is 
marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233597 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233608 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233620 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233631 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233644 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233655 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233668 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233688 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233711 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233736 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233751 5017 reconstruct.go:130] "Volume is marked as uncertain and added into 
the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233765 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233783 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233798 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233809 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233821 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233839 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233862 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233877 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233893 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233910 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233926 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233942 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233957 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233972 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.233987 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234004 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234018 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234034 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234048 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234062 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234080 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234094 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" 
volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234129 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234148 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234183 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234199 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234215 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234233 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234249 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234264 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234279 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234297 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234314 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234335 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234350 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234365 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234382 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234398 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234414 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234429 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234447 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234462 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234477 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234492 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" 
volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234509 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234525 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234541 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234572 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234588 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234608 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234623 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234638 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234653 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234668 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234684 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" 
volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234699 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234714 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234731 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234747 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234764 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234778 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234794 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234807 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234821 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234837 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234860 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234873 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234886 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234901 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234913 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234928 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234941 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234954 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234970 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234983 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.234997 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235011 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" 
volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235025 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235038 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235051 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235066 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235082 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235096 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235133 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235150 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235165 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235176 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235192 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" 
volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235205 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235221 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235235 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235248 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235260 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235272 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235285 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235299 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235310 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235329 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235350 5017 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235361 5017 reconstruct.go:97] "Volume reconstruction finished" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.235368 5017 reconciler.go:26] "Reconciler: start to sync state" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.246269 5017 manager.go:324] Recovery completed Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.253301 5017 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.256394 5017 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.256440 5017 status_manager.go:217] "Starting to sync pod status with apiserver" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.256491 5017 kubelet.go:2335] "Starting kubelet main sync loop" Jan 22 14:03:05 crc kubenswrapper[5017]: E0122 14:03:05.256770 5017 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.257519 5017 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.235:6443: connect: connection refused Jan 22 14:03:05 crc kubenswrapper[5017]: E0122 14:03:05.257606 5017 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.235:6443: connect: connection refused" logger="UnhandledError" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.260168 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.261858 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.261897 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.261907 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.263638 5017 cpu_manager.go:225] "Starting CPU manager" policy="none" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.263665 5017 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.263685 5017 state_mem.go:36] "Initialized new in-memory state store" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.272144 5017 policy_none.go:49] "None policy: Start" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.273206 5017 memory_manager.go:170] "Starting memorymanager" policy="None" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.273237 5017 state_mem.go:35] "Initializing new in-memory state store" Jan 22 14:03:05 crc kubenswrapper[5017]: E0122 14:03:05.309357 5017 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 22 
14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.324244 5017 manager.go:334] "Starting Device Plugin manager" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.324307 5017 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.324320 5017 server.go:79] "Starting device plugin registration server" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.325304 5017 eviction_manager.go:189] "Eviction manager: starting control loop" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.325400 5017 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.326598 5017 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.326715 5017 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.326725 5017 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Jan 22 14:03:05 crc kubenswrapper[5017]: E0122 14:03:05.332939 5017 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.357819 5017 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.357986 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.359159 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.359238 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.359275 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.359575 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.359866 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.359921 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.360691 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.360723 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.360734 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.360851 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.360859 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.360889 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.360901 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.360929 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.360965 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.361466 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.361499 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.361509 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.361593 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.361945 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.361980 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.362224 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.362254 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.362265 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.362692 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.362718 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.362730 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.362692 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.362782 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.362796 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.362959 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.363054 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.363084 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.363928 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.363984 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.363983 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.364000 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.364033 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.364013 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.364294 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.364319 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.364952 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.364989 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.365002 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:05 crc kubenswrapper[5017]: E0122 14:03:05.412521 5017 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.235:6443: connect: connection refused" interval="400ms" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.425901 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.427360 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.427417 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.427431 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.427468 5017 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 22 14:03:05 crc kubenswrapper[5017]: E0122 14:03:05.428238 5017 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.235:6443: connect: connection refused" node="crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.438235 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.438279 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.438302 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.438318 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" 
(UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.438338 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.438361 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.438392 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.438479 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.438567 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.438629 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.438691 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.438732 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.438770 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod 
\"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.438838 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.438868 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540178 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540234 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540253 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540267 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540296 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540310 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540325 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540342 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: 
\"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540357 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540371 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540384 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540398 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540411 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540427 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540440 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540458 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540527 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:03:05 crc 
kubenswrapper[5017]: I0122 14:03:05.540599 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540596 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540627 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540704 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540733 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540581 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540658 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540663 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540688 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540709 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540661 5017 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540711 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.540658 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.628477 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.629991 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.630037 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.630049 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.630077 5017 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 22 14:03:05 crc kubenswrapper[5017]: E0122 14:03:05.630577 5017 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.235:6443: connect: connection refused" node="crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.681039 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.700956 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.708512 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-827690a8237ceb3ee8eec7930446de4ce7010ee71ccfd07579a7adf3f02d96de WatchSource:0}: Error finding container 827690a8237ceb3ee8eec7930446de4ce7010ee71ccfd07579a7adf3f02d96de: Status 404 returned error can't find the container with id 827690a8237ceb3ee8eec7930446de4ce7010ee71ccfd07579a7adf3f02d96de Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.717677 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.720958 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-e99647e0262eed762d32baefb9adf0deacb28adda85d68b78eeb7ed3c1a947ce WatchSource:0}: Error finding container e99647e0262eed762d32baefb9adf0deacb28adda85d68b78eeb7ed3c1a947ce: Status 404 returned error can't find the container with id e99647e0262eed762d32baefb9adf0deacb28adda85d68b78eeb7ed3c1a947ce Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.726842 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.731858 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-d5fe6c1fba66b7ba4d079b62acba14fc33bba7040e5779de79606a2a1fe27ebf WatchSource:0}: Error finding container d5fe6c1fba66b7ba4d079b62acba14fc33bba7040e5779de79606a2a1fe27ebf: Status 404 returned error can't find the container with id d5fe6c1fba66b7ba4d079b62acba14fc33bba7040e5779de79606a2a1fe27ebf Jan 22 14:03:05 crc kubenswrapper[5017]: I0122 14:03:05.732398 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.745988 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-07ca57cb49f2503438b208997ae38c6621f61a5ac3c5f12db88324e8a044f394 WatchSource:0}: Error finding container 07ca57cb49f2503438b208997ae38c6621f61a5ac3c5f12db88324e8a044f394: Status 404 returned error can't find the container with id 07ca57cb49f2503438b208997ae38c6621f61a5ac3c5f12db88324e8a044f394 Jan 22 14:03:05 crc kubenswrapper[5017]: W0122 14:03:05.747380 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-77325fc4c1fc28c46426588537418487d3c7cf1ae62b9c7fc4d53972578145ae WatchSource:0}: Error finding container 77325fc4c1fc28c46426588537418487d3c7cf1ae62b9c7fc4d53972578145ae: Status 404 returned error can't find the container with id 77325fc4c1fc28c46426588537418487d3c7cf1ae62b9c7fc4d53972578145ae Jan 22 14:03:05 crc kubenswrapper[5017]: E0122 14:03:05.813761 5017 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.235:6443: connect: connection refused" interval="800ms" Jan 22 14:03:06 crc kubenswrapper[5017]: I0122 14:03:06.031486 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:06 crc kubenswrapper[5017]: I0122 14:03:06.032770 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:06 crc kubenswrapper[5017]: I0122 14:03:06.032811 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:06 crc kubenswrapper[5017]: I0122 14:03:06.032828 5017 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:06 crc kubenswrapper[5017]: I0122 14:03:06.032856 5017 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 22 14:03:06 crc kubenswrapper[5017]: E0122 14:03:06.033326 5017 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.235:6443: connect: connection refused" node="crc" Jan 22 14:03:06 crc kubenswrapper[5017]: I0122 14:03:06.208486 5017 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.235:6443: connect: connection refused Jan 22 14:03:06 crc kubenswrapper[5017]: I0122 14:03:06.209213 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 01:50:44.016346016 +0000 UTC Jan 22 14:03:06 crc kubenswrapper[5017]: I0122 14:03:06.264683 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d5fe6c1fba66b7ba4d079b62acba14fc33bba7040e5779de79606a2a1fe27ebf"} Jan 22 14:03:06 crc kubenswrapper[5017]: I0122 14:03:06.266378 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e99647e0262eed762d32baefb9adf0deacb28adda85d68b78eeb7ed3c1a947ce"} Jan 22 14:03:06 crc kubenswrapper[5017]: I0122 14:03:06.267925 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"827690a8237ceb3ee8eec7930446de4ce7010ee71ccfd07579a7adf3f02d96de"} Jan 22 14:03:06 crc kubenswrapper[5017]: I0122 14:03:06.271456 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"77325fc4c1fc28c46426588537418487d3c7cf1ae62b9c7fc4d53972578145ae"} Jan 22 14:03:06 crc kubenswrapper[5017]: I0122 14:03:06.274727 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"07ca57cb49f2503438b208997ae38c6621f61a5ac3c5f12db88324e8a044f394"} Jan 22 14:03:06 crc kubenswrapper[5017]: W0122 14:03:06.281037 5017 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.235:6443: connect: connection refused Jan 22 14:03:06 crc kubenswrapper[5017]: E0122 14:03:06.281141 5017 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.235:6443: connect: connection refused" logger="UnhandledError" Jan 22 14:03:06 crc kubenswrapper[5017]: W0122 14:03:06.329477 5017 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get 
"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.235:6443: connect: connection refused Jan 22 14:03:06 crc kubenswrapper[5017]: E0122 14:03:06.329566 5017 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.235:6443: connect: connection refused" logger="UnhandledError" Jan 22 14:03:06 crc kubenswrapper[5017]: W0122 14:03:06.579256 5017 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.235:6443: connect: connection refused Jan 22 14:03:06 crc kubenswrapper[5017]: E0122 14:03:06.579368 5017 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.235:6443: connect: connection refused" logger="UnhandledError" Jan 22 14:03:06 crc kubenswrapper[5017]: E0122 14:03:06.615233 5017 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.235:6443: connect: connection refused" interval="1.6s" Jan 22 14:03:06 crc kubenswrapper[5017]: W0122 14:03:06.640256 5017 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.235:6443: connect: connection refused Jan 22 14:03:06 crc kubenswrapper[5017]: E0122 14:03:06.640430 5017 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.235:6443: connect: connection refused" logger="UnhandledError" Jan 22 14:03:06 crc kubenswrapper[5017]: I0122 14:03:06.833726 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:06 crc kubenswrapper[5017]: I0122 14:03:06.835247 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:06 crc kubenswrapper[5017]: I0122 14:03:06.835296 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:06 crc kubenswrapper[5017]: I0122 14:03:06.835307 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:06 crc kubenswrapper[5017]: I0122 14:03:06.835337 5017 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 22 14:03:06 crc kubenswrapper[5017]: E0122 14:03:06.839820 5017 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.235:6443: connect: connection refused" node="crc" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.208985 5017 csi_plugin.go:884] Failed to 
contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.235:6443: connect: connection refused Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.209917 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 18:02:34.415793068 +0000 UTC Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.279695 5017 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="ba454cfca55d3a8ac6fc47a6635a7b89e8271441bebf7df2686387561d60e580" exitCode=0 Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.279830 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"ba454cfca55d3a8ac6fc47a6635a7b89e8271441bebf7df2686387561d60e580"} Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.279842 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.280777 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.280816 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.280826 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.282788 5017 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811" exitCode=0 Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.282851 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811"} Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.282932 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.284031 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.284105 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.284143 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.285672 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858"} Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.285708 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a"} Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.285772 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267"} Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.285786 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6"} Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.285754 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.287503 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.287545 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.287560 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.288613 5017 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc" exitCode=0 Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.288727 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc"} Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.288838 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.290010 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.290035 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.290045 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.291961 5017 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b" exitCode=0 Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.291987 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b"} Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.292305 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.292384 5017 kubelet_node_status.go:401] 
"Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.293559 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.293584 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.293593 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.293891 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.293952 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.293981 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:07 crc kubenswrapper[5017]: I0122 14:03:07.301875 5017 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 22 14:03:07 crc kubenswrapper[5017]: E0122 14:03:07.303343 5017 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.129.56.235:6443: connect: connection refused" logger="UnhandledError" Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.208847 5017 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.235:6443: connect: connection refused Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.210213 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 02:32:02.792597418 +0000 UTC Jan 22 14:03:08 crc kubenswrapper[5017]: E0122 14:03:08.216487 5017 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.235:6443: connect: connection refused" interval="3.2s" Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.299506 5017 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f" exitCode=0 Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.299583 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f"} Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.299731 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.301457 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.301497 5017 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.301507 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.302543 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"3d89a1583ab3d460d7a746814667e90ce611803586438aa8e9023b84d422d6c0"} Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.304368 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.305897 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.305965 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.305992 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.314589 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"821419c228395ceab721be9e601aeffd3f5e24dce0e095873a2ec4b820b5ac29"} Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.314644 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ec1dbc41418ac75e7457e9f7c96ccae5a4f41000133314ec19b09377870b3c1e"} Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.314655 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"64ce1b2fd6d34be7d6a188ba0958773d3e5c61f1d49aeeaf74b8b25be9246f39"} Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.314762 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.316646 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.316689 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.316702 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.318995 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.319184 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695"} Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.319284 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c"} Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.320010 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.320038 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.320050 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.440562 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.442398 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.442452 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.442466 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:08 crc kubenswrapper[5017]: I0122 14:03:08.442500 5017 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 22 14:03:09 crc kubenswrapper[5017]: I0122 14:03:09.210818 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 18:49:56.456872917 +0000 UTC Jan 22 14:03:09 crc kubenswrapper[5017]: I0122 14:03:09.325448 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570"} Jan 22 14:03:09 crc kubenswrapper[5017]: I0122 14:03:09.325505 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:09 crc kubenswrapper[5017]: I0122 14:03:09.325525 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb"} Jan 22 14:03:09 crc kubenswrapper[5017]: I0122 14:03:09.325550 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673"} Jan 22 14:03:09 crc kubenswrapper[5017]: I0122 14:03:09.326711 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:09 crc kubenswrapper[5017]: I0122 14:03:09.326767 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:09 crc kubenswrapper[5017]: I0122 14:03:09.326791 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:09 crc kubenswrapper[5017]: I0122 14:03:09.327984 5017 generic.go:334] "Generic (PLEG): container finished" 
podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a" exitCode=0 Jan 22 14:03:09 crc kubenswrapper[5017]: I0122 14:03:09.328073 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a"} Jan 22 14:03:09 crc kubenswrapper[5017]: I0122 14:03:09.328128 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:09 crc kubenswrapper[5017]: I0122 14:03:09.332551 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:09 crc kubenswrapper[5017]: I0122 14:03:09.332582 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:09 crc kubenswrapper[5017]: I0122 14:03:09.332543 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 22 14:03:09 crc kubenswrapper[5017]: I0122 14:03:09.334972 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:09 crc kubenswrapper[5017]: I0122 14:03:09.335013 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:09 crc kubenswrapper[5017]: I0122 14:03:09.335029 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:09 crc kubenswrapper[5017]: I0122 14:03:09.334985 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:09 crc kubenswrapper[5017]: I0122 14:03:09.335422 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:09 crc kubenswrapper[5017]: I0122 14:03:09.335460 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:09 crc kubenswrapper[5017]: I0122 14:03:09.335567 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:09 crc kubenswrapper[5017]: I0122 14:03:09.335653 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:09 crc kubenswrapper[5017]: I0122 14:03:09.335719 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:10 crc kubenswrapper[5017]: I0122 14:03:10.027887 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:03:10 crc kubenswrapper[5017]: I0122 14:03:10.210955 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 02:49:38.647495302 +0000 UTC Jan 22 14:03:10 crc kubenswrapper[5017]: I0122 14:03:10.336129 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd"} Jan 22 14:03:10 crc kubenswrapper[5017]: I0122 14:03:10.336211 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9"} Jan 22 14:03:10 crc kubenswrapper[5017]: I0122 14:03:10.336238 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7"} Jan 22 14:03:10 crc kubenswrapper[5017]: I0122 14:03:10.336260 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2"} Jan 22 14:03:10 crc kubenswrapper[5017]: I0122 14:03:10.336273 5017 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 14:03:10 crc kubenswrapper[5017]: I0122 14:03:10.336324 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:10 crc kubenswrapper[5017]: I0122 14:03:10.336392 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:10 crc kubenswrapper[5017]: I0122 14:03:10.337336 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:10 crc kubenswrapper[5017]: I0122 14:03:10.337349 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:10 crc kubenswrapper[5017]: I0122 14:03:10.337365 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:10 crc kubenswrapper[5017]: I0122 14:03:10.337376 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:10 crc kubenswrapper[5017]: I0122 14:03:10.337405 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:10 crc kubenswrapper[5017]: I0122 14:03:10.337444 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:10 crc kubenswrapper[5017]: I0122 14:03:10.782845 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:03:10 crc kubenswrapper[5017]: I0122 14:03:10.783297 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:10 crc kubenswrapper[5017]: I0122 14:03:10.785604 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:10 crc kubenswrapper[5017]: I0122 14:03:10.785649 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:10 crc kubenswrapper[5017]: I0122 14:03:10.785662 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:10 crc kubenswrapper[5017]: I0122 14:03:10.787834 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:03:11 crc kubenswrapper[5017]: I0122 14:03:11.068639 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:03:11 crc 
kubenswrapper[5017]: I0122 14:03:11.211373 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 06:18:15.279331751 +0000 UTC Jan 22 14:03:11 crc kubenswrapper[5017]: I0122 14:03:11.337253 5017 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 22 14:03:11 crc kubenswrapper[5017]: I0122 14:03:11.342856 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"bc053db203a46c2a0f0e934df38f20523f9f655ae2227d7759442d4071228b8b"} Jan 22 14:03:11 crc kubenswrapper[5017]: I0122 14:03:11.342920 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:11 crc kubenswrapper[5017]: I0122 14:03:11.343014 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:11 crc kubenswrapper[5017]: I0122 14:03:11.343189 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:11 crc kubenswrapper[5017]: I0122 14:03:11.343761 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:11 crc kubenswrapper[5017]: I0122 14:03:11.343791 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:11 crc kubenswrapper[5017]: I0122 14:03:11.343801 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:11 crc kubenswrapper[5017]: I0122 14:03:11.344533 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:11 crc kubenswrapper[5017]: I0122 14:03:11.344590 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:11 crc kubenswrapper[5017]: I0122 14:03:11.344611 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:11 crc kubenswrapper[5017]: I0122 14:03:11.344542 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:11 crc kubenswrapper[5017]: I0122 14:03:11.345000 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:11 crc kubenswrapper[5017]: I0122 14:03:11.345014 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:11 crc kubenswrapper[5017]: I0122 14:03:11.700730 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:03:12 crc kubenswrapper[5017]: I0122 14:03:12.212351 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 09:00:43.332210255 +0000 UTC Jan 22 14:03:12 crc kubenswrapper[5017]: I0122 14:03:12.233608 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:03:12 crc kubenswrapper[5017]: I0122 14:03:12.345885 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:12 
crc kubenswrapper[5017]: I0122 14:03:12.346372 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:12 crc kubenswrapper[5017]: I0122 14:03:12.346548 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:12 crc kubenswrapper[5017]: I0122 14:03:12.347043 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:12 crc kubenswrapper[5017]: I0122 14:03:12.347083 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:12 crc kubenswrapper[5017]: I0122 14:03:12.347093 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:12 crc kubenswrapper[5017]: I0122 14:03:12.347737 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:12 crc kubenswrapper[5017]: I0122 14:03:12.347778 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:12 crc kubenswrapper[5017]: I0122 14:03:12.347790 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:12 crc kubenswrapper[5017]: I0122 14:03:12.347885 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:12 crc kubenswrapper[5017]: I0122 14:03:12.347911 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:12 crc kubenswrapper[5017]: I0122 14:03:12.347922 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:12 crc kubenswrapper[5017]: I0122 14:03:12.519415 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:03:13 crc kubenswrapper[5017]: I0122 14:03:13.212591 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 22:06:38.601723935 +0000 UTC Jan 22 14:03:13 crc kubenswrapper[5017]: I0122 14:03:13.348898 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:13 crc kubenswrapper[5017]: I0122 14:03:13.348905 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:13 crc kubenswrapper[5017]: I0122 14:03:13.350343 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:13 crc kubenswrapper[5017]: I0122 14:03:13.350409 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:13 crc kubenswrapper[5017]: I0122 14:03:13.350430 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:13 crc kubenswrapper[5017]: I0122 14:03:13.352451 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:13 crc kubenswrapper[5017]: I0122 14:03:13.352514 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:13 crc kubenswrapper[5017]: I0122 14:03:13.352530 
5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:14 crc kubenswrapper[5017]: I0122 14:03:14.212780 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 06:32:44.425310422 +0000 UTC Jan 22 14:03:14 crc kubenswrapper[5017]: I0122 14:03:14.307399 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Jan 22 14:03:14 crc kubenswrapper[5017]: I0122 14:03:14.307612 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:14 crc kubenswrapper[5017]: I0122 14:03:14.309007 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:14 crc kubenswrapper[5017]: I0122 14:03:14.309051 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:14 crc kubenswrapper[5017]: I0122 14:03:14.309062 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:15 crc kubenswrapper[5017]: I0122 14:03:15.213254 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 19:34:04.855081641 +0000 UTC Jan 22 14:03:15 crc kubenswrapper[5017]: E0122 14:03:15.333141 5017 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 22 14:03:16 crc kubenswrapper[5017]: I0122 14:03:16.208867 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Jan 22 14:03:16 crc kubenswrapper[5017]: I0122 14:03:16.209176 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:16 crc kubenswrapper[5017]: I0122 14:03:16.210516 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:16 crc kubenswrapper[5017]: I0122 14:03:16.210568 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:16 crc kubenswrapper[5017]: I0122 14:03:16.210577 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:16 crc kubenswrapper[5017]: I0122 14:03:16.214043 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 17:58:36.223419603 +0000 UTC Jan 22 14:03:17 crc kubenswrapper[5017]: I0122 14:03:17.015221 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:03:17 crc kubenswrapper[5017]: I0122 14:03:17.015503 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:17 crc kubenswrapper[5017]: I0122 14:03:17.017182 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:17 crc kubenswrapper[5017]: I0122 14:03:17.017266 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:17 crc kubenswrapper[5017]: I0122 14:03:17.017279 5017 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:17 crc kubenswrapper[5017]: I0122 14:03:17.019522 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:03:17 crc kubenswrapper[5017]: I0122 14:03:17.215182 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 03:28:20.572181496 +0000 UTC Jan 22 14:03:17 crc kubenswrapper[5017]: I0122 14:03:17.357673 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:17 crc kubenswrapper[5017]: I0122 14:03:17.358497 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:17 crc kubenswrapper[5017]: I0122 14:03:17.358528 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:17 crc kubenswrapper[5017]: I0122 14:03:17.358539 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:18 crc kubenswrapper[5017]: I0122 14:03:18.215426 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 19:02:02.148079528 +0000 UTC Jan 22 14:03:18 crc kubenswrapper[5017]: W0122 14:03:18.354099 5017 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Jan 22 14:03:18 crc kubenswrapper[5017]: I0122 14:03:18.354228 5017 trace.go:236] Trace[492759038]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (22-Jan-2026 14:03:08.352) (total time: 10001ms): Jan 22 14:03:18 crc kubenswrapper[5017]: Trace[492759038]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (14:03:18.354) Jan 22 14:03:18 crc kubenswrapper[5017]: Trace[492759038]: [10.001766942s] [10.001766942s] END Jan 22 14:03:18 crc kubenswrapper[5017]: E0122 14:03:18.354251 5017 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Jan 22 14:03:18 crc kubenswrapper[5017]: E0122 14:03:18.444295 5017 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Jan 22 14:03:18 crc kubenswrapper[5017]: W0122 14:03:18.886198 5017 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Jan 22 14:03:18 crc kubenswrapper[5017]: I0122 14:03:18.886296 5017 trace.go:236] Trace[1334227254]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (22-Jan-2026 14:03:08.884) (total time: 10001ms): Jan 22 14:03:18 crc kubenswrapper[5017]: Trace[1334227254]: ---"Objects listed" error:Get 
"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (14:03:18.886) Jan 22 14:03:18 crc kubenswrapper[5017]: Trace[1334227254]: [10.001790343s] [10.001790343s] END Jan 22 14:03:18 crc kubenswrapper[5017]: E0122 14:03:18.886321 5017 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Jan 22 14:03:19 crc kubenswrapper[5017]: W0122 14:03:19.011475 5017 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout Jan 22 14:03:19 crc kubenswrapper[5017]: I0122 14:03:19.011561 5017 trace.go:236] Trace[263979202]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (22-Jan-2026 14:03:09.010) (total time: 10001ms): Jan 22 14:03:19 crc kubenswrapper[5017]: Trace[263979202]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10000ms (14:03:19.011) Jan 22 14:03:19 crc kubenswrapper[5017]: Trace[263979202]: [10.0010182s] [10.0010182s] END Jan 22 14:03:19 crc kubenswrapper[5017]: E0122 14:03:19.011581 5017 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Jan 22 14:03:19 crc kubenswrapper[5017]: W0122 14:03:19.056596 5017 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Jan 22 14:03:19 crc kubenswrapper[5017]: I0122 14:03:19.056688 5017 trace.go:236] Trace[496719624]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (22-Jan-2026 14:03:09.055) (total time: 10001ms): Jan 22 14:03:19 crc kubenswrapper[5017]: Trace[496719624]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (14:03:19.056) Jan 22 14:03:19 crc kubenswrapper[5017]: Trace[496719624]: [10.001305138s] [10.001305138s] END Jan 22 14:03:19 crc kubenswrapper[5017]: E0122 14:03:19.056710 5017 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Jan 22 14:03:19 crc kubenswrapper[5017]: I0122 14:03:19.209860 5017 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Jan 22 14:03:19 crc kubenswrapper[5017]: I0122 14:03:19.216017 5017 certificate_manager.go:356] 
kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 20:48:07.156662012 +0000 UTC Jan 22 14:03:19 crc kubenswrapper[5017]: I0122 14:03:19.384463 5017 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Jan 22 14:03:19 crc kubenswrapper[5017]: I0122 14:03:19.384524 5017 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Jan 22 14:03:19 crc kubenswrapper[5017]: I0122 14:03:19.389329 5017 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Jan 22 14:03:19 crc kubenswrapper[5017]: I0122 14:03:19.389416 5017 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Jan 22 14:03:20 crc kubenswrapper[5017]: I0122 14:03:20.015302 5017 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded" start-of-body= Jan 22 14:03:20 crc kubenswrapper[5017]: I0122 14:03:20.015459 5017 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded" Jan 22 14:03:20 crc kubenswrapper[5017]: I0122 14:03:20.216548 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 01:45:53.546435435 +0000 UTC Jan 22 14:03:21 crc kubenswrapper[5017]: I0122 14:03:21.216757 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 03:53:01.506552054 +0000 UTC Jan 22 14:03:21 crc kubenswrapper[5017]: I0122 14:03:21.645251 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:21 crc kubenswrapper[5017]: I0122 14:03:21.647984 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:21 crc kubenswrapper[5017]: I0122 14:03:21.648065 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:21 crc kubenswrapper[5017]: I0122 14:03:21.648077 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 22 14:03:21 crc kubenswrapper[5017]: I0122 14:03:21.648133 5017 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 22 14:03:21 crc kubenswrapper[5017]: E0122 14:03:21.654526 5017 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Jan 22 14:03:22 crc kubenswrapper[5017]: I0122 14:03:22.217539 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 14:57:23.660194819 +0000 UTC Jan 22 14:03:22 crc kubenswrapper[5017]: I0122 14:03:22.254944 5017 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 22 14:03:22 crc kubenswrapper[5017]: I0122 14:03:22.333450 5017 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 22 14:03:22 crc kubenswrapper[5017]: I0122 14:03:22.527829 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:03:22 crc kubenswrapper[5017]: I0122 14:03:22.533640 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.208207 5017 apiserver.go:52] "Watching apiserver" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.211696 5017 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.212254 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-apiserver/kube-apiserver-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c"] Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.212683 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.212803 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:23 crc kubenswrapper[5017]: E0122 14:03:23.212873 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.213713 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:23 crc kubenswrapper[5017]: E0122 14:03:23.213898 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.214744 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.214399 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.214794 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:23 crc kubenswrapper[5017]: E0122 14:03:23.215548 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.217942 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 06:54:50.538594725 +0000 UTC Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.220534 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.220595 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.220682 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.220749 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.220781 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.220921 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.220932 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.220989 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.221415 5017 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.252830 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.269893 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.281710 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.297008 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.309083 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.309856 5017 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.324323 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.339474 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.355313 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.370598 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.373754 5017 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 14:03:23 crc kubenswrapper[5017]: E0122 14:03:23.381148 5017 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-apiserver-crc\" already exists" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:03:23 crc kubenswrapper[5017]: I0122 14:03:23.798191 5017 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.218705 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 01:45:05.383370212 +0000 UTC Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.376208 5017 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 14:03:24 crc kubenswrapper[5017]: E0122 14:03:24.379749 5017 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.383061 5017 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.396107 5017 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.420070 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.432175 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.445654 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.458567 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.476800 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.483870 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.483919 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.483942 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.483962 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.483986 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484011 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484033 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 
22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484054 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484076 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484102 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484148 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484171 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484192 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484215 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484242 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484262 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484286 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" 
(UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484315 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484338 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484358 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484350 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484419 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484474 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484497 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484540 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484566 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484589 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484613 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484636 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484662 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484689 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484765 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484804 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484783 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484828 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484950 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.484988 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485018 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485049 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485073 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485100 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485178 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485199 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485213 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485256 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485292 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485327 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485365 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485397 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485430 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485457 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485490 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485519 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod 
\"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485546 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485574 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485601 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485628 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485655 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485677 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485701 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485728 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485756 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485781 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod 
\"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485808 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485836 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485860 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485887 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485918 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485947 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485975 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.486011 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.486331 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.486372 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.486413 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.486453 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.486480 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.486536 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487298 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487327 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487358 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487387 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487414 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487440 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487472 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487502 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487615 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487651 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487682 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487712 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487763 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487795 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487834 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487867 
5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487894 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487916 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487937 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487959 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.488403 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.488451 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.488483 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.488510 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.488535 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Jan 22 
14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.488561 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.488586 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.488610 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.488638 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.488663 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.488693 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.488751 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.488779 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.488803 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.488828 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: 
\"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.488851 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.488877 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.489312 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.489351 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.489372 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.489390 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.489412 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.489438 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.489458 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.489476 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.489635 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490422 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490451 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490470 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490510 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490532 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490555 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490578 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490598 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490620 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490642 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490664 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490685 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490707 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490727 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490747 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490764 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490784 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490805 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490824 5017 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490848 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490867 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490886 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490905 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490924 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490943 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490960 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490979 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490998 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491017 5017 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491035 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491057 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491076 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491096 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491136 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491154 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491177 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491195 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491212 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491233 5017 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491253 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491275 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491295 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491314 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491418 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491447 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491467 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491490 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491511 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod 
\"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491529 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491548 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491570 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491588 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491606 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491627 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491646 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491663 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491682 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491701 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491720 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491741 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491758 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491777 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491795 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491814 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491833 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491855 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491876 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491896 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491916 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.492010 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.492039 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.492059 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.492081 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.492102 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.492169 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.492230 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.492256 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.492292 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.492313 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485220 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.492341 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.492468 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.492546 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.492595 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.492739 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.492783 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.492811 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.492820 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.492909 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.493303 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.493329 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.493374 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.493956 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.494045 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.494184 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.494303 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.494391 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.494585 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.494904 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.495040 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.495131 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.495225 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.495343 5017 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.495411 5017 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.495509 5017 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.495579 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.495659 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.495739 5017 reconciler_common.go:293] 
"Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.495799 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.495866 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.496347 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.494645 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.497043 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.494374 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.486617 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.486627 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.486745 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.486791 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.486907 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.486972 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487049 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487161 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487219 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487362 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487442 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487384 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487873 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.487991 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.488172 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.488368 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.488372 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.488452 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.489485 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.489504 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.489762 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.489788 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.489941 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490011 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490093 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490232 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.490480 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491034 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491599 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491935 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.491984 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.492263 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.492321 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.495254 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.495307 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.495522 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.495622 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.495815 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.495834 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.495847 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.485464 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). 
InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.496033 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.496164 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.496475 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: E0122 14:03:24.496576 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:03:24.9965295 +0000 UTC m=+19.988991528 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:03:24 crc kubenswrapper[5017]: E0122 14:03:24.496798 5017 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.496904 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.496923 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.496997 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.497019 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.497029 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.498008 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.497303 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.497530 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.497963 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: E0122 14:03:24.498321 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:24.998298191 +0000 UTC m=+19.990760059 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.498555 5017 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.503463 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.503845 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.504143 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.508999 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.509076 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.509557 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.511035 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: E0122 14:03:24.511621 5017 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:03:24 crc kubenswrapper[5017]: E0122 14:03:24.511702 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:25.011681693 +0000 UTC m=+20.004143541 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.511952 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.512170 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.512210 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.512464 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.512587 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.512605 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.512976 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.513004 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.513054 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.513986 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.514076 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.514359 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.514637 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.529049 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.529832 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.530672 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.531129 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.531939 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.533992 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.535198 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.535250 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.535496 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.535572 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). 
InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.536102 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.535407 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.538562 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.544682 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.544964 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.545761 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.545864 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.548360 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.548484 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.548791 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.550322 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.554553 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.555154 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.555261 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.555356 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.555417 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). 
InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.555599 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.555878 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.555972 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.556148 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.556276 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.556299 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.556348 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.554667 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.556695 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.557011 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.557295 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.557338 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.557403 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.557449 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.557451 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.557804 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.557855 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.557955 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.558890 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.559270 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.559304 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.559521 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.559587 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.559811 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.560085 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.561673 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.562324 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.562549 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.558081 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.563585 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.563790 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.563782 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.564098 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.564191 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.564716 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.564719 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.562241 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.566918 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.562783 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.567007 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.567147 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.567161 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: E0122 14:03:24.567680 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:03:24 crc kubenswrapper[5017]: E0122 14:03:24.567736 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:03:24 crc kubenswrapper[5017]: E0122 14:03:24.567757 5017 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:24 crc kubenswrapper[5017]: E0122 14:03:24.567922 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:25.067839426 +0000 UTC m=+20.060301284 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.552301 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.567967 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.557200 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.568795 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.568798 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.568816 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.569102 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.569181 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.569892 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: E0122 14:03:24.570072 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:03:24 crc kubenswrapper[5017]: E0122 14:03:24.570098 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:03:24 crc kubenswrapper[5017]: E0122 14:03:24.570123 5017 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:24 crc kubenswrapper[5017]: E0122 14:03:24.570195 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:25.070174494 +0000 UTC m=+20.062636352 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.570375 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.570727 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.571081 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.571238 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.571395 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.571424 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.571456 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.571552 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.571999 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.576356 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.576576 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.576993 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.577565 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.578422 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.579531 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.580269 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.580598 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). 
InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.580625 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.580692 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.580710 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.580895 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.581232 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.581080 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.581614 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.581678 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.581815 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.581852 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.581908 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.586009 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.586146 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.592316 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.596972 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597028 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597171 5017 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597185 5017 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597172 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597198 5017 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597282 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597299 5017 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node 
\"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597312 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597324 5017 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597337 5017 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597347 5017 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597359 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597369 5017 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597372 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597381 5017 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597392 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597421 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597443 5017 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597459 5017 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597470 5017 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597481 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597491 5017 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597502 5017 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597518 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597533 5017 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597553 5017 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597566 5017 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597579 5017 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597591 5017 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597604 5017 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597624 5017 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597639 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597653 5017 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597665 5017 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597677 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597691 5017 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597708 5017 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597723 5017 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597736 5017 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597750 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597762 5017 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597772 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597782 5017 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597792 5017 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597809 5017 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597819 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: 
\"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597830 5017 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597840 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597852 5017 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597863 5017 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597873 5017 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597883 5017 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597893 5017 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597902 5017 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597913 5017 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597923 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597935 5017 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597946 5017 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597959 5017 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.597981 5017 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.598001 5017 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.598012 5017 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.598021 5017 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.598031 5017 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.598052 5017 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.598072 5017 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.598085 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.598099 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.598124 5017 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.598142 5017 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.598194 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.598205 5017 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" 
(UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.598216 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599340 5017 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599355 5017 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599367 5017 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599398 5017 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599466 5017 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599478 5017 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599489 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599503 5017 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599515 5017 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599527 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599538 5017 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599549 5017 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599571 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599583 5017 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599596 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599609 5017 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599622 5017 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599641 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599838 5017 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599850 5017 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599871 5017 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599881 5017 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599891 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599900 5017 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599909 5017 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599918 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599930 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599938 5017 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599946 5017 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599955 5017 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599963 5017 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599975 5017 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599986 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.599995 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600005 5017 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600015 5017 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600026 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600035 5017 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600046 5017 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600056 5017 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600065 5017 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600076 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600087 5017 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600096 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600105 5017 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600129 5017 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600140 5017 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600149 5017 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600159 5017 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600169 5017 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600179 5017 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600190 5017 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600199 5017 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600210 5017 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600219 5017 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600230 5017 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600241 5017 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600251 5017 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600260 5017 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600296 5017 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600305 5017 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600315 5017 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600326 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600336 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: 
\"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600346 5017 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600355 5017 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600365 5017 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600374 5017 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600384 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600394 5017 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600406 5017 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600416 5017 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600427 5017 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600437 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600451 5017 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600461 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600471 5017 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600481 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600491 5017 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600511 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600521 5017 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600531 5017 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600542 5017 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600552 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600561 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600571 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600582 5017 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600591 5017 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600601 5017 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600611 5017 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600622 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600632 5017 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600642 5017 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600652 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600661 5017 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600670 5017 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600680 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600689 5017 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600698 5017 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600708 5017 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600716 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600726 5017 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600736 5017 reconciler_common.go:293] 
"Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600747 5017 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600757 5017 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600766 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600775 5017 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600786 5017 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600811 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600822 5017 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600832 5017 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.600844 5017 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.605593 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.624404 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.628977 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.636259 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.665089 5017 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.701530 5017 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.701566 5017 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.701581 5017 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.701595 5017 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.730498 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.739988 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 14:03:24 crc kubenswrapper[5017]: I0122 14:03:24.749277 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.004421 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:03:25 crc kubenswrapper[5017]: E0122 14:03:25.004521 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-22 14:03:26.004500268 +0000 UTC m=+20.996962126 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.005376 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:25 crc kubenswrapper[5017]: E0122 14:03:25.005881 5017 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:03:25 crc kubenswrapper[5017]: E0122 14:03:25.012003 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:26.009987668 +0000 UTC m=+21.002449526 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.085690 5017 csr.go:261] certificate signing request csr-69xxd is approved, waiting to be issued Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.099851 5017 csr.go:257] certificate signing request csr-69xxd is issued Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.106673 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.106709 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.106733 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:25 crc kubenswrapper[5017]: E0122 14:03:25.106843 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:03:25 crc kubenswrapper[5017]: E0122 14:03:25.106857 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:03:25 crc kubenswrapper[5017]: E0122 14:03:25.106868 5017 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:25 crc kubenswrapper[5017]: E0122 14:03:25.106910 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:26.106896703 +0000 UTC m=+21.099358561 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:25 crc kubenswrapper[5017]: E0122 14:03:25.107197 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:03:25 crc kubenswrapper[5017]: E0122 14:03:25.107213 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:03:25 crc kubenswrapper[5017]: E0122 14:03:25.107220 5017 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:25 crc kubenswrapper[5017]: E0122 14:03:25.107242 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:26.107235913 +0000 UTC m=+21.099697761 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:25 crc kubenswrapper[5017]: E0122 14:03:25.107272 5017 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:03:25 crc kubenswrapper[5017]: E0122 14:03:25.107292 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:26.107286804 +0000 UTC m=+21.099748662 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.137819 5017 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Jan 22 14:03:25 crc kubenswrapper[5017]: W0122 14:03:25.139350 5017 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.CSIDriver ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.219585 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 09:37:16.313766741 +0000 UTC Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.257593 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.257627 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.257717 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:25 crc kubenswrapper[5017]: E0122 14:03:25.257750 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:03:25 crc kubenswrapper[5017]: E0122 14:03:25.257833 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:03:25 crc kubenswrapper[5017]: E0122 14:03:25.257950 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.262383 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.263234 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.264060 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.264779 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.265536 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.267014 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.267744 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.268796 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.269522 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.270590 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.271185 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.272397 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.272985 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.273659 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.274706 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.275360 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.276572 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.277162 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.277870 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.279202 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.279746 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.280810 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.281405 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.282556 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.283058 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.283850 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" 
path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.285097 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.285647 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.286763 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.287340 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.288430 5017 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.288597 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.290416 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.291687 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.292242 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.293858 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.294608 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.295581 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.296266 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.297490 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.298033 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.299089 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.299754 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.300840 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.301358 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.302346 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.303026 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.304302 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.304903 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.305871 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.306461 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.307515 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.308651 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.309285 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" 
path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.311839 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.345308 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.365632 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.381398 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.381831 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e"} Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.381895 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f"} Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.381913 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"e75cd7bc7d846c1732e77e637338a9d6797c3a7036b08b4e47b24c661ca41a9c"} Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.383253 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"4e8fdc2064440a13b3747c6d8cdbf095f3a007fee3bd08f005f0e9988c152440"} Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.385181 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b"} Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.385254 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"36476dcc630a039cbb6ea9a3b09ec9bbb75843e134891e91bb5dedd6ffdf2e8a"} Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.398959 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.413666 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.433064 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e
6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.450516 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.465229 5017 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.478807 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.500163 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.519306 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.534984 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:25 crc kubenswrapper[5017]: I0122 14:03:25.553281 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.015532 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.015660 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:26 crc kubenswrapper[5017]: E0122 14:03:26.015742 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:03:28.015710305 +0000 UTC m=+23.008172163 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:03:26 crc kubenswrapper[5017]: E0122 14:03:26.015785 5017 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:03:26 crc kubenswrapper[5017]: E0122 14:03:26.015869 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:28.015844709 +0000 UTC m=+23.008306747 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.090548 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-d5kwd"] Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.091182 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-d5kwd" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.094894 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-j2qmt"] Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.095358 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.095361 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.095568 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.095668 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.100720 5017 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-22 13:58:25 +0000 UTC, rotation deadline is 2026-12-11 07:21:03.743756899 +0000 UTC Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.100789 5017 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7745h17m37.642970406s for next certificate rotation Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.103635 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.104020 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.104143 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.104319 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.104383 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.117375 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.117446 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.117475 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:26 crc kubenswrapper[5017]: E0122 14:03:26.117587 5017 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:03:26 crc kubenswrapper[5017]: E0122 14:03:26.117693 5017 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:28.117669997 +0000 UTC m=+23.110131925 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:03:26 crc kubenswrapper[5017]: E0122 14:03:26.118229 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:03:26 crc kubenswrapper[5017]: E0122 14:03:26.118254 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:03:26 crc kubenswrapper[5017]: E0122 14:03:26.118270 5017 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:26 crc kubenswrapper[5017]: E0122 14:03:26.118315 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:28.118304925 +0000 UTC m=+23.110766783 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:26 crc kubenswrapper[5017]: E0122 14:03:26.118387 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:03:26 crc kubenswrapper[5017]: E0122 14:03:26.118402 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:03:26 crc kubenswrapper[5017]: E0122 14:03:26.118411 5017 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:26 crc kubenswrapper[5017]: E0122 14:03:26.118441 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:28.118431829 +0000 UTC m=+23.110893687 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.123894 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.156757 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.183187 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.205505 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.218025 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-multus-conf-dir\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.218078 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/c223e7fe-8360-4731-a32d-3cf60ed7324b-cni-binary-copy\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.218102 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-multus-socket-dir-parent\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.218147 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: 
\"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-host-run-k8s-cni-cncf-io\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.218308 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5skt\" (UniqueName: \"kubernetes.io/projected/c223e7fe-8360-4731-a32d-3cf60ed7324b-kube-api-access-r5skt\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.218461 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-hostroot\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.218495 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/c223e7fe-8360-4731-a32d-3cf60ed7324b-multus-daemon-config\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.218544 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-system-cni-dir\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.218574 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-multus-cni-dir\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.218601 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-host-run-netns\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.218628 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/075aa74c-f517-49be-9043-c345ecbff66e-hosts-file\") pod \"node-resolver-d5kwd\" (UID: \"075aa74c-f517-49be-9043-c345ecbff66e\") " pod="openshift-dns/node-resolver-d5kwd" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.218798 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-host-var-lib-cni-bin\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.218870 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-host-var-lib-kubelet\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.218899 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-host-run-multus-certs\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.218934 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-os-release\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.218957 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-host-var-lib-cni-multus\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.218985 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6949n\" (UniqueName: \"kubernetes.io/projected/075aa74c-f517-49be-9043-c345ecbff66e-kube-api-access-6949n\") pod \"node-resolver-d5kwd\" (UID: \"075aa74c-f517-49be-9043-c345ecbff66e\") " pod="openshift-dns/node-resolver-d5kwd" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.219029 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-cnibin\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.219054 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-etc-kubernetes\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.220297 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 20:27:27.242456489 +0000 UTC Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.227495 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.235940 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.245756 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.257741 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.258559 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.263043 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.274032 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clus
ter-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 
2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.288408 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.304967 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.317781 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320466 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-os-release\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320497 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-host-var-lib-cni-multus\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320521 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6949n\" (UniqueName: \"kubernetes.io/projected/075aa74c-f517-49be-9043-c345ecbff66e-kube-api-access-6949n\") pod \"node-resolver-d5kwd\" (UID: \"075aa74c-f517-49be-9043-c345ecbff66e\") " pod="openshift-dns/node-resolver-d5kwd" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320541 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-cnibin\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320558 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-etc-kubernetes\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320588 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-multus-conf-dir\") pod \"multus-j2qmt\" (UID: 
\"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320610 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/c223e7fe-8360-4731-a32d-3cf60ed7324b-cni-binary-copy\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320628 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-multus-socket-dir-parent\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320627 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-host-var-lib-cni-multus\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320648 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-host-run-k8s-cni-cncf-io\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320667 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5skt\" (UniqueName: \"kubernetes.io/projected/c223e7fe-8360-4731-a32d-3cf60ed7324b-kube-api-access-r5skt\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320693 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-hostroot\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320699 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-etc-kubernetes\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320714 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/c223e7fe-8360-4731-a32d-3cf60ed7324b-multus-daemon-config\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320744 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-system-cni-dir\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320764 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-multus-cni-dir\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320784 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-host-run-netns\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320795 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-hostroot\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320802 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/075aa74c-f517-49be-9043-c345ecbff66e-hosts-file\") pod \"node-resolver-d5kwd\" (UID: \"075aa74c-f517-49be-9043-c345ecbff66e\") " pod="openshift-dns/node-resolver-d5kwd" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320818 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-multus-conf-dir\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320872 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-host-var-lib-cni-bin\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320854 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/075aa74c-f517-49be-9043-c345ecbff66e-hosts-file\") pod \"node-resolver-d5kwd\" (UID: \"075aa74c-f517-49be-9043-c345ecbff66e\") " pod="openshift-dns/node-resolver-d5kwd" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320764 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-multus-socket-dir-parent\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320932 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-host-run-netns\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320847 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-host-var-lib-cni-bin\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 
14:03:26.320956 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-system-cni-dir\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320963 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-multus-cni-dir\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320983 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-host-var-lib-kubelet\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.320820 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-host-run-k8s-cni-cncf-io\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.321016 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-host-var-lib-kubelet\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.321019 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-cnibin\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.321031 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-host-run-multus-certs\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.321130 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-host-run-multus-certs\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.321483 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/c223e7fe-8360-4731-a32d-3cf60ed7324b-os-release\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.321640 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/c223e7fe-8360-4731-a32d-3cf60ed7324b-cni-binary-copy\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " 
pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.322333 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/c223e7fe-8360-4731-a32d-3cf60ed7324b-multus-daemon-config\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.335886 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\
\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.350858 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]}
,{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.354518 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5skt\" (UniqueName: \"kubernetes.io/projected/c223e7fe-8360-4731-a32d-3cf60ed7324b-kube-api-access-r5skt\") pod \"multus-j2qmt\" (UID: \"c223e7fe-8360-4731-a32d-3cf60ed7324b\") " pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.355374 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6949n\" (UniqueName: \"kubernetes.io/projected/075aa74c-f517-49be-9043-c345ecbff66e-kube-api-access-6949n\") pod \"node-resolver-d5kwd\" (UID: \"075aa74c-f517-49be-9043-c345ecbff66e\") " pod="openshift-dns/node-resolver-d5kwd" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.366254 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.384135 5017 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: E0122 14:03:26.396587 5017 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"etcd-crc\" already exists" pod="openshift-etcd/etcd-crc" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.397750 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.406381 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-d5kwd" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.414041 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-j2qmt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.415482 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: W0122 14:03:26.425854 5017 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod075aa74c_f517_49be_9043_c345ecbff66e.slice/crio-71033084f8fed705651394a6d9a7028bf60831469428535f17c88aa686ccc945 WatchSource:0}: Error finding container 71033084f8fed705651394a6d9a7028bf60831469428535f17c88aa686ccc945: Status 404 returned error can't find the container with id 71033084f8fed705651394a6d9a7028bf60831469428535f17c88aa686ccc945 Jan 22 14:03:26 crc kubenswrapper[5017]: W0122 14:03:26.429199 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc223e7fe_8360_4731_a32d_3cf60ed7324b.slice/crio-2615267fae01abbed8e152ba04e3151e9aefa7be0b97a3d306565862aca4f83c WatchSource:0}: Error finding container 2615267fae01abbed8e152ba04e3151e9aefa7be0b97a3d306565862aca4f83c: Status 404 returned error can't find the container with id 2615267fae01abbed8e152ba04e3151e9aefa7be0b97a3d306565862aca4f83c Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.442082 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"
}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCod
e\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.515528 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-pjvjs"] Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.516417 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-cr62h"] Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.516648 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-vgtf7"] Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.516739 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.516801 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.518864 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.521816 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.522085 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.522325 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.522661 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.522840 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.523008 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.523162 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.523301 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.523346 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.523459 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.523495 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.523739 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.523887 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.524068 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.535605 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.552255 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.571257 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.586135 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.600469 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.620576 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625159 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/955e4d61-cf5f-4df9-8b71-540ac4dd5856-tuning-conf-dir\") pod \"multus-additional-cni-plugins-pjvjs\" (UID: \"955e4d61-cf5f-4df9-8b71-540ac4dd5856\") " pod="openshift-multus/multus-additional-cni-plugins-pjvjs" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625206 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/955e4d61-cf5f-4df9-8b71-540ac4dd5856-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-pjvjs\" (UID: \"955e4d61-cf5f-4df9-8b71-540ac4dd5856\") " pod="openshift-multus/multus-additional-cni-plugins-pjvjs" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625228 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/43c64840-5206-4dec-834e-77e0562f19de-ovn-node-metrics-cert\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625248 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-etc-openvswitch\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625273 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/955e4d61-cf5f-4df9-8b71-540ac4dd5856-cnibin\") pod \"multus-additional-cni-plugins-pjvjs\" (UID: \"955e4d61-cf5f-4df9-8b71-540ac4dd5856\") " pod="openshift-multus/multus-additional-cni-plugins-pjvjs" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625292 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-slash\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625312 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-run-systemd\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625351 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3f43f877-4d1f-4041-af1f-39a962f7ac0d-rootfs\") pod \"machine-config-daemon-cr62h\" (UID: \"3f43f877-4d1f-4041-af1f-39a962f7ac0d\") " pod="openshift-machine-config-operator/machine-config-daemon-cr62h" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625395 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-run-ovn\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625433 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/955e4d61-cf5f-4df9-8b71-540ac4dd5856-cni-binary-copy\") pod \"multus-additional-cni-plugins-pjvjs\" (UID: \"955e4d61-cf5f-4df9-8b71-540ac4dd5856\") " pod="openshift-multus/multus-additional-cni-plugins-pjvjs" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625458 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-cni-netd\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625481 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/955e4d61-cf5f-4df9-8b71-540ac4dd5856-system-cni-dir\") pod \"multus-additional-cni-plugins-pjvjs\" (UID: \"955e4d61-cf5f-4df9-8b71-540ac4dd5856\") " pod="openshift-multus/multus-additional-cni-plugins-pjvjs" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625498 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4cvx\" (UniqueName: \"kubernetes.io/projected/955e4d61-cf5f-4df9-8b71-540ac4dd5856-kube-api-access-p4cvx\") pod \"multus-additional-cni-plugins-pjvjs\" (UID: \"955e4d61-cf5f-4df9-8b71-540ac4dd5856\") " pod="openshift-multus/multus-additional-cni-plugins-pjvjs" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625513 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7blrz\" (UniqueName: \"kubernetes.io/projected/3f43f877-4d1f-4041-af1f-39a962f7ac0d-kube-api-access-7blrz\") pod \"machine-config-daemon-cr62h\" (UID: \"3f43f877-4d1f-4041-af1f-39a962f7ac0d\") " pod="openshift-machine-config-operator/machine-config-daemon-cr62h" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625528 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-node-log\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 
14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625603 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-log-socket\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625625 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-run-netns\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625641 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3f43f877-4d1f-4041-af1f-39a962f7ac0d-proxy-tls\") pod \"machine-config-daemon-cr62h\" (UID: \"3f43f877-4d1f-4041-af1f-39a962f7ac0d\") " pod="openshift-machine-config-operator/machine-config-daemon-cr62h" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625666 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-run-ovn-kubernetes\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625681 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/43c64840-5206-4dec-834e-77e0562f19de-env-overrides\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625704 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-var-lib-openvswitch\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625718 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625817 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/43c64840-5206-4dec-834e-77e0562f19de-ovnkube-config\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625836 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: 
\"kubernetes.io/host-path/955e4d61-cf5f-4df9-8b71-540ac4dd5856-os-release\") pod \"multus-additional-cni-plugins-pjvjs\" (UID: \"955e4d61-cf5f-4df9-8b71-540ac4dd5856\") " pod="openshift-multus/multus-additional-cni-plugins-pjvjs" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625888 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3f43f877-4d1f-4041-af1f-39a962f7ac0d-mcd-auth-proxy-config\") pod \"machine-config-daemon-cr62h\" (UID: \"3f43f877-4d1f-4041-af1f-39a962f7ac0d\") " pod="openshift-machine-config-operator/machine-config-daemon-cr62h" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625909 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-run-openvswitch\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625929 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-kubelet\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625953 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-systemd-units\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625971 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-cni-bin\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.625986 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/43c64840-5206-4dec-834e-77e0562f19de-ovnkube-script-lib\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.626009 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lx5q\" (UniqueName: \"kubernetes.io/projected/43c64840-5206-4dec-834e-77e0562f19de-kube-api-access-6lx5q\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.636570 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.658411 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\
\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\
\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.671599 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.688441 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.700154 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.715786 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.727107 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3f43f877-4d1f-4041-af1f-39a962f7ac0d-proxy-tls\") pod \"machine-config-daemon-cr62h\" (UID: \"3f43f877-4d1f-4041-af1f-39a962f7ac0d\") " pod="openshift-machine-config-operator/machine-config-daemon-cr62h" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.727182 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-run-ovn-kubernetes\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 
14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.727244 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/43c64840-5206-4dec-834e-77e0562f19de-env-overrides\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.727299 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-run-ovn-kubernetes\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.727316 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/43c64840-5206-4dec-834e-77e0562f19de-ovnkube-config\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.727568 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-var-lib-openvswitch\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.727347 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-var-lib-openvswitch\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.728175 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.728200 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/955e4d61-cf5f-4df9-8b71-540ac4dd5856-os-release\") pod \"multus-additional-cni-plugins-pjvjs\" (UID: \"955e4d61-cf5f-4df9-8b71-540ac4dd5856\") " pod="openshift-multus/multus-additional-cni-plugins-pjvjs" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.728222 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3f43f877-4d1f-4041-af1f-39a962f7ac0d-mcd-auth-proxy-config\") pod \"machine-config-daemon-cr62h\" (UID: \"3f43f877-4d1f-4041-af1f-39a962f7ac0d\") " pod="openshift-machine-config-operator/machine-config-daemon-cr62h" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.728245 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-run-openvswitch\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.728268 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-kubelet\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.728287 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-systemd-units\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.728309 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/43c64840-5206-4dec-834e-77e0562f19de-ovnkube-script-lib\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.728331 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lx5q\" (UniqueName: \"kubernetes.io/projected/43c64840-5206-4dec-834e-77e0562f19de-kube-api-access-6lx5q\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.728353 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-cni-bin\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.728359 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-systemd-units\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.728406 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-kubelet\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.728322 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-run-openvswitch\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.728484 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/43c64840-5206-4dec-834e-77e0562f19de-env-overrides\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.728491 5017 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/955e4d61-cf5f-4df9-8b71-540ac4dd5856-os-release\") pod \"multus-additional-cni-plugins-pjvjs\" (UID: \"955e4d61-cf5f-4df9-8b71-540ac4dd5856\") " pod="openshift-multus/multus-additional-cni-plugins-pjvjs" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.728529 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-cni-bin\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.728604 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/43c64840-5206-4dec-834e-77e0562f19de-ovnkube-config\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.728567 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/955e4d61-cf5f-4df9-8b71-540ac4dd5856-tuning-conf-dir\") pod \"multus-additional-cni-plugins-pjvjs\" (UID: \"955e4d61-cf5f-4df9-8b71-540ac4dd5856\") " pod="openshift-multus/multus-additional-cni-plugins-pjvjs" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.728377 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/955e4d61-cf5f-4df9-8b71-540ac4dd5856-tuning-conf-dir\") pod \"multus-additional-cni-plugins-pjvjs\" (UID: \"955e4d61-cf5f-4df9-8b71-540ac4dd5856\") " pod="openshift-multus/multus-additional-cni-plugins-pjvjs" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.728996 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/955e4d61-cf5f-4df9-8b71-540ac4dd5856-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-pjvjs\" (UID: \"955e4d61-cf5f-4df9-8b71-540ac4dd5856\") " pod="openshift-multus/multus-additional-cni-plugins-pjvjs" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729039 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729088 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/43c64840-5206-4dec-834e-77e0562f19de-ovn-node-metrics-cert\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729144 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-etc-openvswitch\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729168 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/955e4d61-cf5f-4df9-8b71-540ac4dd5856-cnibin\") pod \"multus-additional-cni-plugins-pjvjs\" (UID: 
\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\") " pod="openshift-multus/multus-additional-cni-plugins-pjvjs" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729188 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-slash\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729241 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-run-systemd\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729266 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3f43f877-4d1f-4041-af1f-39a962f7ac0d-rootfs\") pod \"machine-config-daemon-cr62h\" (UID: \"3f43f877-4d1f-4041-af1f-39a962f7ac0d\") " pod="openshift-machine-config-operator/machine-config-daemon-cr62h" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729268 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/43c64840-5206-4dec-834e-77e0562f19de-ovnkube-script-lib\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729285 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-run-ovn\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729339 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-run-ovn\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729344 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3f43f877-4d1f-4041-af1f-39a962f7ac0d-mcd-auth-proxy-config\") pod \"machine-config-daemon-cr62h\" (UID: \"3f43f877-4d1f-4041-af1f-39a962f7ac0d\") " pod="openshift-machine-config-operator/machine-config-daemon-cr62h" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729357 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/955e4d61-cf5f-4df9-8b71-540ac4dd5856-cni-binary-copy\") pod \"multus-additional-cni-plugins-pjvjs\" (UID: \"955e4d61-cf5f-4df9-8b71-540ac4dd5856\") " pod="openshift-multus/multus-additional-cni-plugins-pjvjs" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729373 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-slash\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729382 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/955e4d61-cf5f-4df9-8b71-540ac4dd5856-cnibin\") pod \"multus-additional-cni-plugins-pjvjs\" (UID: \"955e4d61-cf5f-4df9-8b71-540ac4dd5856\") " pod="openshift-multus/multus-additional-cni-plugins-pjvjs" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729397 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-run-systemd\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729387 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-cni-netd\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729409 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3f43f877-4d1f-4041-af1f-39a962f7ac0d-rootfs\") pod \"machine-config-daemon-cr62h\" (UID: \"3f43f877-4d1f-4041-af1f-39a962f7ac0d\") " pod="openshift-machine-config-operator/machine-config-daemon-cr62h" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729419 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-cni-netd\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729474 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-etc-openvswitch\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729518 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4cvx\" (UniqueName: \"kubernetes.io/projected/955e4d61-cf5f-4df9-8b71-540ac4dd5856-kube-api-access-p4cvx\") pod \"multus-additional-cni-plugins-pjvjs\" (UID: \"955e4d61-cf5f-4df9-8b71-540ac4dd5856\") " pod="openshift-multus/multus-additional-cni-plugins-pjvjs" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729562 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7blrz\" (UniqueName: \"kubernetes.io/projected/3f43f877-4d1f-4041-af1f-39a962f7ac0d-kube-api-access-7blrz\") pod \"machine-config-daemon-cr62h\" (UID: \"3f43f877-4d1f-4041-af1f-39a962f7ac0d\") " pod="openshift-machine-config-operator/machine-config-daemon-cr62h" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729768 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-node-log\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 
14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729795 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-log-socket\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729810 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-node-log\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729825 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/955e4d61-cf5f-4df9-8b71-540ac4dd5856-system-cni-dir\") pod \"multus-additional-cni-plugins-pjvjs\" (UID: \"955e4d61-cf5f-4df9-8b71-540ac4dd5856\") " pod="openshift-multus/multus-additional-cni-plugins-pjvjs" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729769 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/955e4d61-cf5f-4df9-8b71-540ac4dd5856-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-pjvjs\" (UID: \"955e4d61-cf5f-4df9-8b71-540ac4dd5856\") " pod="openshift-multus/multus-additional-cni-plugins-pjvjs" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729859 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-run-netns\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729856 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-log-socket\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729878 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/955e4d61-cf5f-4df9-8b71-540ac4dd5856-system-cni-dir\") pod \"multus-additional-cni-plugins-pjvjs\" (UID: \"955e4d61-cf5f-4df9-8b71-540ac4dd5856\") " pod="openshift-multus/multus-additional-cni-plugins-pjvjs" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729916 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-run-netns\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729914 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/955e4d61-cf5f-4df9-8b71-540ac4dd5856-cni-binary-copy\") pod \"multus-additional-cni-plugins-pjvjs\" (UID: \"955e4d61-cf5f-4df9-8b71-540ac4dd5856\") " pod="openshift-multus/multus-additional-cni-plugins-pjvjs" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.729956 
5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.732855 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/43c64840-5206-4dec-834e-77e0562f19de-ovn-node-metrics-cert\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.733499 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3f43f877-4d1f-4041-af1f-39a962f7ac0d-proxy-tls\") pod \"machine-config-daemon-cr62h\" (UID: \"3f43f877-4d1f-4041-af1f-39a962f7ac0d\") " pod="openshift-machine-config-operator/machine-config-daemon-cr62h" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.745679 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4cvx\" (UniqueName: \"kubernetes.io/projected/955e4d61-cf5f-4df9-8b71-540ac4dd5856-kube-api-access-p4cvx\") pod \"multus-additional-cni-plugins-pjvjs\" (UID: \"955e4d61-cf5f-4df9-8b71-540ac4dd5856\") " pod="openshift-multus/multus-additional-cni-plugins-pjvjs" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.747397 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7blrz\" (UniqueName: \"kubernetes.io/projected/3f43f877-4d1f-4041-af1f-39a962f7ac0d-kube-api-access-7blrz\") pod \"machine-config-daemon-cr62h\" (UID: \"3f43f877-4d1f-4041-af1f-39a962f7ac0d\") " pod="openshift-machine-config-operator/machine-config-daemon-cr62h" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.748559 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lx5q\" (UniqueName: \"kubernetes.io/projected/43c64840-5206-4dec-834e-77e0562f19de-kube-api-access-6lx5q\") pod \"ovnkube-node-vgtf7\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.753469 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655
ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.769516 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.786790 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.800007 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.821731 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.837191 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.837220 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\
\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.855272 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.858225 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.874551 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: W0122 14:03:26.880761 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod955e4d61_cf5f_4df9_8b71_540ac4dd5856.slice/crio-89b06bd008e0168905c139fbc04229860c91ef6e23ed3ec2cb9287b05fb8c0f3 WatchSource:0}: Error finding container 89b06bd008e0168905c139fbc04229860c91ef6e23ed3ec2cb9287b05fb8c0f3: Status 404 returned error can't find the container with id 89b06bd008e0168905c139fbc04229860c91ef6e23ed3ec2cb9287b05fb8c0f3 Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.911895 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.940689 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.960681 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:26 crc kubenswrapper[5017]: I0122 14:03:26.982244 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.021238 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.025331 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:03:27 crc kubenswrapper[5017]: 
I0122 14:03:27.030547 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.038710 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.054269 5017 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entryp
oint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reas
on\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.075548 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.092333 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.116249 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655
ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.149871 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.168096 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.180853 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.191503 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.210006 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.220999 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 16:25:16.266074874 +0000 UTC Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.224780 5017 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\
\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.238426 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.251886 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.257280 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.257357 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:27 crc kubenswrapper[5017]: E0122 14:03:27.257412 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:03:27 crc kubenswrapper[5017]: E0122 14:03:27.257504 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.257631 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:27 crc kubenswrapper[5017]: E0122 14:03:27.257704 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.272232 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.288763 5017 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\
\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.304539 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.318433 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.350153 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655
ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.371634 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.387689 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.392541 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" event={"ID":"955e4d61-cf5f-4df9-8b71-540ac4dd5856","Type":"ContainerStarted","Data":"9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c"} Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.392627 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" event={"ID":"955e4d61-cf5f-4df9-8b71-540ac4dd5856","Type":"ContainerStarted","Data":"89b06bd008e0168905c139fbc04229860c91ef6e23ed3ec2cb9287b05fb8c0f3"} Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.398100 5017 generic.go:334] "Generic (PLEG): container finished" podID="43c64840-5206-4dec-834e-77e0562f19de" 
containerID="ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10" exitCode=0 Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.398200 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerDied","Data":"ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10"} Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.398248 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerStarted","Data":"52a774bd5b6e86138690c2dfd67d451bcb9f98fad127a3125411c537377ceb67"} Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.403897 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01"} Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.411964 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" event={"ID":"3f43f877-4d1f-4041-af1f-39a962f7ac0d","Type":"ContainerStarted","Data":"dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5"} Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.412063 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" event={"ID":"3f43f877-4d1f-4041-af1f-39a962f7ac0d","Type":"ContainerStarted","Data":"b3ecc17b83cfbd10fd7926a149531b50f1d706cc710831e51fcb69c8bb958117"} Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.420073 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j2qmt" event={"ID":"c223e7fe-8360-4731-a32d-3cf60ed7324b","Type":"ContainerStarted","Data":"ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd"} Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.420122 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j2qmt" event={"ID":"c223e7fe-8360-4731-a32d-3cf60ed7324b","Type":"ContainerStarted","Data":"2615267fae01abbed8e152ba04e3151e9aefa7be0b97a3d306565862aca4f83c"} Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.421613 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-d5kwd" event={"ID":"075aa74c-f517-49be-9043-c345ecbff66e","Type":"ContainerStarted","Data":"c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b"} Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.421690 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-d5kwd" event={"ID":"075aa74c-f517-49be-9043-c345ecbff66e","Type":"ContainerStarted","Data":"71033084f8fed705651394a6d9a7028bf60831469428535f17c88aa686ccc945"} Jan 22 14:03:27 crc kubenswrapper[5017]: E0122 14:03:27.429500 5017 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.441436 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.477949 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: 
I0122 14:03:27.496303 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerI
D\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.515399 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.546081 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.560716 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.575077 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.595462 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.608556 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.625825 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.648567 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.693034 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.727945 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.762079 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.803917 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.842447 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.897654 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.926404 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:27 crc kubenswrapper[5017]: I0122 14:03:27.962747 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.003240 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.046264 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.046389 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:28 crc kubenswrapper[5017]: E0122 14:03:28.046526 5017 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:03:28 crc kubenswrapper[5017]: E0122 14:03:28.046575 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:32.046561876 +0000 UTC m=+27.039023734 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:03:28 crc kubenswrapper[5017]: E0122 14:03:28.046609 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:03:32.046600867 +0000 UTC m=+27.039062735 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.051421 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.055445 5017 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.057899 
5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.057951 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.057966 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.058142 5017 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.074639 5017 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.075000 5017 kubelet_node_status.go:79] "Successfully registered node" node="crc" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.076743 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.076782 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.076796 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.076814 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.076823 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:28Z","lastTransitionTime":"2026-01-22T14:03:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:28 crc kubenswrapper[5017]: E0122 14:03:28.097042 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 
2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.101568 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.101632 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.101645 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.101667 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.101682 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:28Z","lastTransitionTime":"2026-01-22T14:03:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:28 crc kubenswrapper[5017]: E0122 14:03:28.123937 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 
2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.129022 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.129065 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.129077 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.129097 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.129142 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:28Z","lastTransitionTime":"2026-01-22T14:03:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:28 crc kubenswrapper[5017]: E0122 14:03:28.142280 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 
2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.146014 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.146042 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.146054 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.146072 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.146085 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:28Z","lastTransitionTime":"2026-01-22T14:03:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.147698 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.147733 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.147760 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:28 crc kubenswrapper[5017]: E0122 14:03:28.147842 5017 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:03:28 crc kubenswrapper[5017]: E0122 14:03:28.147889 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:32.147873079 +0000 UTC m=+27.140334937 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:03:28 crc kubenswrapper[5017]: E0122 14:03:28.148183 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:03:28 crc kubenswrapper[5017]: E0122 14:03:28.148204 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:03:28 crc kubenswrapper[5017]: E0122 14:03:28.148218 5017 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:28 crc kubenswrapper[5017]: E0122 14:03:28.148252 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:32.14824015 +0000 UTC m=+27.140702008 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:28 crc kubenswrapper[5017]: E0122 14:03:28.148313 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:03:28 crc kubenswrapper[5017]: E0122 14:03:28.148325 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:03:28 crc kubenswrapper[5017]: E0122 14:03:28.148333 5017 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:28 crc kubenswrapper[5017]: E0122 14:03:28.148362 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:32.148353503 +0000 UTC m=+27.140815361 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:28 crc kubenswrapper[5017]: E0122 14:03:28.162759 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 
2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.165812 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.165835 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.165842 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.165856 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.165865 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:28Z","lastTransitionTime":"2026-01-22T14:03:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:28 crc kubenswrapper[5017]: E0122 14:03:28.179170 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 
2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: E0122 14:03:28.179340 5017 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.181904 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.181931 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.181942 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.181958 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.181969 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:28Z","lastTransitionTime":"2026-01-22T14:03:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.222242 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 19:02:05.672254686 +0000 UTC Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.285133 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.285183 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.285213 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.285235 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.285259 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:28Z","lastTransitionTime":"2026-01-22T14:03:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.387892 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.388258 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.388272 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.388289 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.388299 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:28Z","lastTransitionTime":"2026-01-22T14:03:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.427498 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" event={"ID":"3f43f877-4d1f-4041-af1f-39a962f7ac0d","Type":"ContainerStarted","Data":"65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2"} Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.429488 5017 generic.go:334] "Generic (PLEG): container finished" podID="955e4d61-cf5f-4df9-8b71-540ac4dd5856" containerID="9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c" exitCode=0 Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.429541 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" event={"ID":"955e4d61-cf5f-4df9-8b71-540ac4dd5856","Type":"ContainerDied","Data":"9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c"} Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.435893 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerStarted","Data":"bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60"} Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.435969 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerStarted","Data":"1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c"} Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.435987 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerStarted","Data":"2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75"} Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.436002 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerStarted","Data":"ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d"} Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.436014 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerStarted","Data":"fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9"} Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.436026 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerStarted","Data":"af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78"} Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.442715 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.462471 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"
Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\
"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.482925 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":
\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.491933 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-6m84c"] Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.492409 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-6m84c" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.494280 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.494297 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.494316 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.494329 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.494343 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.494356 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:28Z","lastTransitionTime":"2026-01-22T14:03:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.495089 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.495538 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.497051 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.503355 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.524707 5017 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.536750 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.557249 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655
ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.571034 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.583825 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.596409 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.596445 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.596454 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.596469 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.596480 5017 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:28Z","lastTransitionTime":"2026-01-22T14:03:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.599984 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.610494 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.646462 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.652072 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/6a468d1c-9b98-4126-a4b3-fa1af1a19768-serviceca\") pod \"node-ca-6m84c\" (UID: \"6a468d1c-9b98-4126-a4b3-fa1af1a19768\") " pod="openshift-image-registry/node-ca-6m84c" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.652258 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6a468d1c-9b98-4126-a4b3-fa1af1a19768-host\") pod \"node-ca-6m84c\" (UID: \"6a468d1c-9b98-4126-a4b3-fa1af1a19768\") " pod="openshift-image-registry/node-ca-6m84c" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.652365 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lm6h5\" (UniqueName: \"kubernetes.io/projected/6a468d1c-9b98-4126-a4b3-fa1af1a19768-kube-api-access-lm6h5\") pod \"node-ca-6m84c\" (UID: \"6a468d1c-9b98-4126-a4b3-fa1af1a19768\") " pod="openshift-image-registry/node-ca-6m84c" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.682320 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.698703 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.698742 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.698751 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.698802 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 
14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.698812 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:28Z","lastTransitionTime":"2026-01-22T14:03:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.722203 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-c
ontroller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.752846 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/6a468d1c-9b98-4126-a4b3-fa1af1a19768-serviceca\") pod \"node-ca-6m84c\" (UID: \"6a468d1c-9b98-4126-a4b3-fa1af1a19768\") " pod="openshift-image-registry/node-ca-6m84c" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.752890 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6a468d1c-9b98-4126-a4b3-fa1af1a19768-host\") pod \"node-ca-6m84c\" (UID: \"6a468d1c-9b98-4126-a4b3-fa1af1a19768\") " pod="openshift-image-registry/node-ca-6m84c" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.752906 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lm6h5\" (UniqueName: \"kubernetes.io/projected/6a468d1c-9b98-4126-a4b3-fa1af1a19768-kube-api-access-lm6h5\") pod \"node-ca-6m84c\" (UID: \"6a468d1c-9b98-4126-a4b3-fa1af1a19768\") " pod="openshift-image-registry/node-ca-6m84c" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.753037 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6a468d1c-9b98-4126-a4b3-fa1af1a19768-host\") pod \"node-ca-6m84c\" (UID: \"6a468d1c-9b98-4126-a4b3-fa1af1a19768\") " pod="openshift-image-registry/node-ca-6m84c" Jan 22 14:03:28 crc 
kubenswrapper[5017]: I0122 14:03:28.753971 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/6a468d1c-9b98-4126-a4b3-fa1af1a19768-serviceca\") pod \"node-ca-6m84c\" (UID: \"6a468d1c-9b98-4126-a4b3-fa1af1a19768\") " pod="openshift-image-registry/node-ca-6m84c" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.763089 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secre
ts/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.792537 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lm6h5\" (UniqueName: \"kubernetes.io/projected/6a468d1c-9b98-4126-a4b3-fa1af1a19768-kube-api-access-lm6h5\") pod \"node-ca-6m84c\" (UID: \"6a468d1c-9b98-4126-a4b3-fa1af1a19768\") " pod="openshift-image-registry/node-ca-6m84c" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.801530 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.801569 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.801581 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.801597 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.801607 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:28Z","lastTransitionTime":"2026-01-22T14:03:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.805094 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-6m84c" Jan 22 14:03:28 crc kubenswrapper[5017]: W0122 14:03:28.818240 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6a468d1c_9b98_4126_a4b3_fa1af1a19768.slice/crio-4325d9d1931054c55fd2128b780f83567bdb739fcc6773a4f4a9db402e4f3393 WatchSource:0}: Error finding container 4325d9d1931054c55fd2128b780f83567bdb739fcc6773a4f4a9db402e4f3393: Status 404 returned error can't find the container with id 4325d9d1931054c55fd2128b780f83567bdb739fcc6773a4f4a9db402e4f3393 Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.825905 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\
\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.876132 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mount
Path\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir
\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.904986 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.905023 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.905032 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.905048 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.905057 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:28Z","lastTransitionTime":"2026-01-22T14:03:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.913822 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.951101 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:28 crc kubenswrapper[5017]: I0122 14:03:28.986174 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.007454 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.007490 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.007499 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.007511 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.007520 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:29Z","lastTransitionTime":"2026-01-22T14:03:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.021382 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.066430 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\
":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"res
tartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.103854 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/
static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.109874 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.109926 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.109942 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.109962 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.109976 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:29Z","lastTransitionTime":"2026-01-22T14:03:29Z","reason":"KubeletNotReady","message":"container runtime network not 
ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.142513 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.182396 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready 
status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.212729 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.212778 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.212792 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.212809 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.212821 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:29Z","lastTransitionTime":"2026-01-22T14:03:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.222657 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 15:45:59.223854323 +0000 UTC Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.222690 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.257547 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.257567 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:29 crc kubenswrapper[5017]: E0122 14:03:29.257685 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.257742 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:29 crc kubenswrapper[5017]: E0122 14:03:29.257848 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:03:29 crc kubenswrapper[5017]: E0122 14:03:29.257999 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.266955 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z 
is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.303234 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.315793 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 
14:03:29.315827 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.315836 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.315849 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.315859 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:29Z","lastTransitionTime":"2026-01-22T14:03:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.345147 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc 
kubenswrapper[5017]: I0122 14:03:29.418348 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.418390 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.418401 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.418415 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.418427 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:29Z","lastTransitionTime":"2026-01-22T14:03:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.440383 5017 generic.go:334] "Generic (PLEG): container finished" podID="955e4d61-cf5f-4df9-8b71-540ac4dd5856" containerID="a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7" exitCode=0 Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.440421 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" event={"ID":"955e4d61-cf5f-4df9-8b71-540ac4dd5856","Type":"ContainerDied","Data":"a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7"} Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.442306 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-6m84c" event={"ID":"6a468d1c-9b98-4126-a4b3-fa1af1a19768","Type":"ContainerStarted","Data":"aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda"} Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.442378 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-6m84c" event={"ID":"6a468d1c-9b98-4126-a4b3-fa1af1a19768","Type":"ContainerStarted","Data":"4325d9d1931054c55fd2128b780f83567bdb739fcc6773a4f4a9db402e4f3393"} Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.459009 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.480472 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"moun
tPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.495914 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.509374 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.522294 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.522337 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.522348 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.522366 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.522378 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:29Z","lastTransitionTime":"2026-01-22T14:03:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.558085 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd
/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\
"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.582895 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.623619 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.626365 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.626395 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.626405 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.626420 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.626431 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:29Z","lastTransitionTime":"2026-01-22T14:03:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.666199 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.700951 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"message\\\":\\\"containers with 
unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.729026 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.729071 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.729082 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.729100 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.729129 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:29Z","lastTransitionTime":"2026-01-22T14:03:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.746170 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.783494 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.823721 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.831379 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.831411 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.831420 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.831434 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.831444 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:29Z","lastTransitionTime":"2026-01-22T14:03:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.861675 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.900685 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.934271 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.934312 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.934321 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.934337 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.934347 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:29Z","lastTransitionTime":"2026-01-22T14:03:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.950824 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"
ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"c
ri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:29 crc kubenswrapper[5017]: I0122 14:03:29.986838 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.022512 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.036425 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.036482 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.036494 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.036509 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.036519 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:30Z","lastTransitionTime":"2026-01-22T14:03:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.062528 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\
\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.102187 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.139086 5017 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.139145 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.139155 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.139169 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.139179 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:30Z","lastTransitionTime":"2026-01-22T14:03:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.145096 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.181666 5017 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.221508 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.223610 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 06:41:27.835132277 +0000 UTC Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.241770 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.241808 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.241817 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.241832 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.241842 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:30Z","lastTransitionTime":"2026-01-22T14:03:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.260027 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.308133 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655
ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.343015 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.344632 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.344678 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.344689 5017 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.344710 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.344721 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:30Z","lastTransitionTime":"2026-01-22T14:03:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.382359 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.421876 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.446749 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.446785 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.446793 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.446806 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.446816 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:30Z","lastTransitionTime":"2026-01-22T14:03:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.448425 5017 generic.go:334] "Generic (PLEG): container finished" podID="955e4d61-cf5f-4df9-8b71-540ac4dd5856" containerID="f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e" exitCode=0 Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.448465 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" event={"ID":"955e4d61-cf5f-4df9-8b71-540ac4dd5856","Type":"ContainerDied","Data":"f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e"} Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.460557 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.510346 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.545045 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-
kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"fin
ishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.549645 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.549709 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.549720 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.549740 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.549752 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:30Z","lastTransitionTime":"2026-01-22T14:03:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.582933 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.623310 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.653147 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.653410 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.653498 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.653579 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.653659 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:30Z","lastTransitionTime":"2026-01-22T14:03:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.662591 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.701272 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.757622 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.757659 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.757669 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.757685 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.757696 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:30Z","lastTransitionTime":"2026-01-22T14:03:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.758496 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655
ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.789218 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.826692 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.859887 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.859918 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.859927 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.859940 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.859949 5017 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:30Z","lastTransitionTime":"2026-01-22T14:03:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.861669 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.900317 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.941671 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.961975 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.962018 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.962027 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.962043 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 
14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.962061 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:30Z","lastTransitionTime":"2026-01-22T14:03:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:30 crc kubenswrapper[5017]: I0122 14:03:30.984095 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-c
ontroller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.022841 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:31Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.062267 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:31Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.063908 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.063947 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.063957 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.063972 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.063982 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:31Z","lastTransitionTime":"2026-01-22T14:03:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.101199 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:31Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.147435 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:31Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.165939 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.165971 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.165980 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.165993 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.166002 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:31Z","lastTransitionTime":"2026-01-22T14:03:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.223905 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 19:18:15.249542141 +0000 UTC Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.257275 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.257397 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.257518 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:31 crc kubenswrapper[5017]: E0122 14:03:31.257515 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:03:31 crc kubenswrapper[5017]: E0122 14:03:31.257634 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:03:31 crc kubenswrapper[5017]: E0122 14:03:31.257731 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.268173 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.268215 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.268226 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.268242 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.268253 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:31Z","lastTransitionTime":"2026-01-22T14:03:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.369955 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.369991 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.370006 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.370025 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.370037 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:31Z","lastTransitionTime":"2026-01-22T14:03:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.454982 5017 generic.go:334] "Generic (PLEG): container finished" podID="955e4d61-cf5f-4df9-8b71-540ac4dd5856" containerID="e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00" exitCode=0 Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.455055 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" event={"ID":"955e4d61-cf5f-4df9-8b71-540ac4dd5856","Type":"ContainerDied","Data":"e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00"} Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.459571 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerStarted","Data":"a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482"} Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.470204 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:31Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.473465 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.473501 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.473510 5017 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.473525 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.473535 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:31Z","lastTransitionTime":"2026-01-22T14:03:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.487200 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:31Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.499786 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8
s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:31Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.511294 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:31Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.530089 5017 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:31Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.542929 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:31Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.554435 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:31Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.564813 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:31Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.575932 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:31Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.576346 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.576386 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.576395 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.576409 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.576418 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:31Z","lastTransitionTime":"2026-01-22T14:03:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.591930 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:31Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.611877 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:31Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.629442 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:31Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.661520 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:31Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.678823 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.678856 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.678865 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.678883 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.678899 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:31Z","lastTransitionTime":"2026-01-22T14:03:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.699084 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:31Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.747704 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:31Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.781781 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.781830 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.781842 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.781863 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.781877 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:31Z","lastTransitionTime":"2026-01-22T14:03:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.884891 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.884953 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.884965 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.884991 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.885013 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:31Z","lastTransitionTime":"2026-01-22T14:03:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.987229 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.987268 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.987277 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.987290 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:31 crc kubenswrapper[5017]: I0122 14:03:31.987299 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:31Z","lastTransitionTime":"2026-01-22T14:03:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.088945 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.089088 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:32 crc kubenswrapper[5017]: E0122 14:03:32.089171 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-22 14:03:40.089110617 +0000 UTC m=+35.081572615 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:03:32 crc kubenswrapper[5017]: E0122 14:03:32.089232 5017 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:03:32 crc kubenswrapper[5017]: E0122 14:03:32.089296 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:40.089280002 +0000 UTC m=+35.081741860 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.090373 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.090421 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.090430 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.090446 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.090455 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:32Z","lastTransitionTime":"2026-01-22T14:03:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.191247 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.191422 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.191468 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:32 crc kubenswrapper[5017]: E0122 14:03:32.191581 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:03:32 crc kubenswrapper[5017]: E0122 14:03:32.191624 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:03:32 crc kubenswrapper[5017]: E0122 14:03:32.191634 5017 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:03:32 crc kubenswrapper[5017]: E0122 14:03:32.191644 5017 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:32 crc kubenswrapper[5017]: E0122 14:03:32.191742 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:40.191707948 +0000 UTC m=+35.184169856 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:03:32 crc kubenswrapper[5017]: E0122 14:03:32.191581 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:03:32 crc kubenswrapper[5017]: E0122 14:03:32.191777 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:03:32 crc kubenswrapper[5017]: E0122 14:03:32.191785 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:40.191766169 +0000 UTC m=+35.184228077 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:32 crc kubenswrapper[5017]: E0122 14:03:32.191790 5017 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:32 crc kubenswrapper[5017]: E0122 14:03:32.191868 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:40.191855872 +0000 UTC m=+35.184317770 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.193280 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.193317 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.193329 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.193345 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.193356 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:32Z","lastTransitionTime":"2026-01-22T14:03:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.224655 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 03:27:45.015508605 +0000 UTC Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.295238 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.295282 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.295294 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.295312 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.295325 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:32Z","lastTransitionTime":"2026-01-22T14:03:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.398121 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.398150 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.398160 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.398172 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.398180 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:32Z","lastTransitionTime":"2026-01-22T14:03:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.465359 5017 generic.go:334] "Generic (PLEG): container finished" podID="955e4d61-cf5f-4df9-8b71-540ac4dd5856" containerID="a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b" exitCode=0 Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.465402 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" event={"ID":"955e4d61-cf5f-4df9-8b71-540ac4dd5856","Type":"ContainerDied","Data":"a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b"} Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.480450 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.495306 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mo
untPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.499893 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.499919 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.499929 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.499945 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.499955 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:32Z","lastTransitionTime":"2026-01-22T14:03:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.509826 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\
\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.524880 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemo
n\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.537338 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.551157 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.566008 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.583998 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655
ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.599483 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.602014 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.602042 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.602050 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.602063 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.602071 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:32Z","lastTransitionTime":"2026-01-22T14:03:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.612693 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.624979 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.635996 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.655021 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.668952 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.681760 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.705410 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.705663 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.705745 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.705814 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.705884 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:32Z","lastTransitionTime":"2026-01-22T14:03:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.808741 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.808780 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.808790 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.808804 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.808815 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:32Z","lastTransitionTime":"2026-01-22T14:03:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.911522 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.911573 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.911587 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.911606 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:32 crc kubenswrapper[5017]: I0122 14:03:32.911618 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:32Z","lastTransitionTime":"2026-01-22T14:03:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.014665 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.014933 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.015020 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.015152 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.015272 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:33Z","lastTransitionTime":"2026-01-22T14:03:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.117442 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.117494 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.117508 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.117528 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.117540 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:33Z","lastTransitionTime":"2026-01-22T14:03:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.219967 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.220017 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.220030 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.220047 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.220061 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:33Z","lastTransitionTime":"2026-01-22T14:03:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.225636 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 06:11:43.977982215 +0000 UTC Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.257010 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.257054 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.257088 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:33 crc kubenswrapper[5017]: E0122 14:03:33.257198 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:03:33 crc kubenswrapper[5017]: E0122 14:03:33.257262 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:03:33 crc kubenswrapper[5017]: E0122 14:03:33.257312 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.322246 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.322296 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.322308 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.322325 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.322336 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:33Z","lastTransitionTime":"2026-01-22T14:03:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.425048 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.425090 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.425099 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.425130 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.425140 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:33Z","lastTransitionTime":"2026-01-22T14:03:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.472411 5017 generic.go:334] "Generic (PLEG): container finished" podID="955e4d61-cf5f-4df9-8b71-540ac4dd5856" containerID="40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373" exitCode=0 Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.472471 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" event={"ID":"955e4d61-cf5f-4df9-8b71-540ac4dd5856","Type":"ContainerDied","Data":"40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373"} Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.477020 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerStarted","Data":"0fd031af875c811a624370aa334b1dcb52707761008cfbd14ced8d09be91d7c5"} Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.477903 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.478005 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.495777 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655
ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.502594 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.504855 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.512830 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.527697 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.528300 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.528332 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.528340 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.528356 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.528365 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:33Z","lastTransitionTime":"2026-01-22T14:03:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.540795 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.553251 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.573025 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging 
kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-li
b\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\
\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.588076 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc
478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc 
kubenswrapper[5017]: I0122 14:03:33.604551 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\
":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.618435 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.630405 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.630437 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.630447 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.630463 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.630478 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:33Z","lastTransitionTime":"2026-01-22T14:03:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.635904 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.648252 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.660712 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.675676 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.688911 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.699460 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.712069 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.722435 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.732687 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.732731 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.732744 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.732762 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.732775 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:33Z","lastTransitionTime":"2026-01-22T14:03:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.743514 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.774401 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.799798 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.817379 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.834750 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.835321 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.835362 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.835377 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.835393 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.835406 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:33Z","lastTransitionTime":"2026-01-22T14:03:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.862329 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fd031af875c811a624370aa334b1dcb52707761
008cfbd14ced8d09be91d7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.876165 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.888797 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.902343 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.919949 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.935872 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.938561 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.938656 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.938671 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.938689 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.938702 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:33Z","lastTransitionTime":"2026-01-22T14:03:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.947179 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:33 crc kubenswrapper[5017]: I0122 14:03:33.959206 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.040811 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.040857 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.040869 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.040886 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.040899 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:34Z","lastTransitionTime":"2026-01-22T14:03:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.143874 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.143930 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.143947 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.143984 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.143998 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:34Z","lastTransitionTime":"2026-01-22T14:03:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.226464 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 06:31:32.303802251 +0000 UTC Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.246902 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.247419 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.247442 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.247467 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.247488 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:34Z","lastTransitionTime":"2026-01-22T14:03:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.349778 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.349822 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.349831 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.349848 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.349858 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:34Z","lastTransitionTime":"2026-01-22T14:03:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.452483 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.452543 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.452559 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.452580 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.452595 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:34Z","lastTransitionTime":"2026-01-22T14:03:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.484521 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" event={"ID":"955e4d61-cf5f-4df9-8b71-540ac4dd5856","Type":"ContainerStarted","Data":"27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d"} Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.484608 5017 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.509573 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655
ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.527963 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.543835 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.555422 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.555456 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.555466 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.555480 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.555488 5017 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:34Z","lastTransitionTime":"2026-01-22T14:03:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.557761 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.571240 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.593411 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fd031af875c811a624370aa334b1dcb52707761008cfbd14ced8d09be91d7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPat
h\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.611445 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.627506 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.642447 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.657668 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.657713 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.657723 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.657741 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.657750 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:34Z","lastTransitionTime":"2026-01-22T14:03:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.662922 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.676753 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.689467 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.704543 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.717722 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.729843 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.759716 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.759762 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.759775 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.759792 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.759805 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:34Z","lastTransitionTime":"2026-01-22T14:03:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.862072 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.862148 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.862161 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.862179 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.862195 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:34Z","lastTransitionTime":"2026-01-22T14:03:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.964910 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.964989 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.965044 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.965077 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:34 crc kubenswrapper[5017]: I0122 14:03:34.965096 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:34Z","lastTransitionTime":"2026-01-22T14:03:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.067897 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.067949 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.067967 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.067991 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.068008 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:35Z","lastTransitionTime":"2026-01-22T14:03:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.171469 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.171514 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.171524 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.171541 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.171551 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:35Z","lastTransitionTime":"2026-01-22T14:03:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.227079 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 18:01:28.309699697 +0000 UTC Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.258677 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:35 crc kubenswrapper[5017]: E0122 14:03:35.258817 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.258926 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.258935 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:35 crc kubenswrapper[5017]: E0122 14:03:35.259185 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:03:35 crc kubenswrapper[5017]: E0122 14:03:35.259241 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.273700 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.273743 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.273756 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.273774 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.273785 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:35Z","lastTransitionTime":"2026-01-22T14:03:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.276716 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.287561 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.312411 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fd031af875c811a624370aa334b1dcb52707761008cfbd14ced8d09be91d7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPat
h\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.330203 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.353373 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.366765 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.376672 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.376704 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.376717 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.376734 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.376747 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:35Z","lastTransitionTime":"2026-01-22T14:03:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.381163 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.401815 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.414734 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.426962 5017 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.439533 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.451024 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.473030 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655
ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.478864 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.478900 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.478910 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.478923 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.478932 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:35Z","lastTransitionTime":"2026-01-22T14:03:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.486970 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.487825 5017 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.499407 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.581962 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.582010 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.582023 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.582037 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.582046 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:35Z","lastTransitionTime":"2026-01-22T14:03:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.684282 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.684329 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.684338 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.684351 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.684361 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:35Z","lastTransitionTime":"2026-01-22T14:03:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.786933 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.786972 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.786983 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.787000 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.787013 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:35Z","lastTransitionTime":"2026-01-22T14:03:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.889183 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.889214 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.889222 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.889234 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.889242 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:35Z","lastTransitionTime":"2026-01-22T14:03:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.983783 5017 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.991955 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.992004 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.992016 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.992034 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:35 crc kubenswrapper[5017]: I0122 14:03:35.992046 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:35Z","lastTransitionTime":"2026-01-22T14:03:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.094559 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.094603 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.094614 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.094629 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.094641 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:36Z","lastTransitionTime":"2026-01-22T14:03:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.196719 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.196796 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.196810 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.196828 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.196838 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:36Z","lastTransitionTime":"2026-01-22T14:03:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.227731 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 13:05:22.118936138 +0000 UTC Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.299511 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.299560 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.299573 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.299590 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.299601 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:36Z","lastTransitionTime":"2026-01-22T14:03:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.402466 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.402509 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.402519 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.402535 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.402544 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:36Z","lastTransitionTime":"2026-01-22T14:03:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.492326 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vgtf7_43c64840-5206-4dec-834e-77e0562f19de/ovnkube-controller/0.log" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.494857 5017 generic.go:334] "Generic (PLEG): container finished" podID="43c64840-5206-4dec-834e-77e0562f19de" containerID="0fd031af875c811a624370aa334b1dcb52707761008cfbd14ced8d09be91d7c5" exitCode=1 Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.494897 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerDied","Data":"0fd031af875c811a624370aa334b1dcb52707761008cfbd14ced8d09be91d7c5"} Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.495526 5017 scope.go:117] "RemoveContainer" containerID="0fd031af875c811a624370aa334b1dcb52707761008cfbd14ced8d09be91d7c5" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.505122 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.505189 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.505200 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.505212 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.505222 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:36Z","lastTransitionTime":"2026-01-22T14:03:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.511761 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:36Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.524264 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:36Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.545575 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c3
3098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:36Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.559731 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:36Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.574803 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:36Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.586156 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:36Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.596599 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:36Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.606944 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.606975 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.606983 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.607014 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.607024 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:36Z","lastTransitionTime":"2026-01-22T14:03:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.612088 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:36Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.625903 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:36Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.642951 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:36Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.654319 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:36Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.663485 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:36Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.680749 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fd031af875c811a624370aa334b1dcb52707761008cfbd14ced8d09be91d7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fd031af875c811a624370aa334b1dcb52707761008cfbd14ced8d09be91d7c5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:35Z\\\",\\\"message\\\":\\\"391 6316 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 14:03:35.599426 6316 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 14:03:35.599403 6316 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0122 14:03:35.599463 6316 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 14:03:35.599471 6316 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 14:03:35.599485 6316 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 14:03:35.599506 6316 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 14:03:35.599513 6316 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0122 14:03:35.599516 6316 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 14:03:35.599528 6316 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 14:03:35.599547 6316 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 14:03:35.599556 6316 factory.go:656] Stopping watch factory\\\\nI0122 14:03:35.599558 6316 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 14:03:35.599584 6316 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:36Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.694091 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:36Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.706805 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":tru
e,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\
\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:36Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.709168 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.709211 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.709219 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.709233 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.709242 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:36Z","lastTransitionTime":"2026-01-22T14:03:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.811466 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.811508 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.811518 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.811532 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.811542 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:36Z","lastTransitionTime":"2026-01-22T14:03:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.914280 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.914541 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.914611 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.914688 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:36 crc kubenswrapper[5017]: I0122 14:03:36.914750 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:36Z","lastTransitionTime":"2026-01-22T14:03:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.017495 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.017531 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.017542 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.017596 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.017610 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:37Z","lastTransitionTime":"2026-01-22T14:03:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.120487 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.120521 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.120530 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.120558 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.120575 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:37Z","lastTransitionTime":"2026-01-22T14:03:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.224293 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.224346 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.224360 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.224386 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.224399 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:37Z","lastTransitionTime":"2026-01-22T14:03:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.228223 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 16:10:23.654240389 +0000 UTC Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.257023 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.257020 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.257035 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:37 crc kubenswrapper[5017]: E0122 14:03:37.257192 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:03:37 crc kubenswrapper[5017]: E0122 14:03:37.257263 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:03:37 crc kubenswrapper[5017]: E0122 14:03:37.257356 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.328017 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.328084 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.328103 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.328158 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.328177 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:37Z","lastTransitionTime":"2026-01-22T14:03:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.430278 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.430353 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.430371 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.430394 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.430410 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:37Z","lastTransitionTime":"2026-01-22T14:03:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.499424 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vgtf7_43c64840-5206-4dec-834e-77e0562f19de/ovnkube-controller/0.log" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.502629 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerStarted","Data":"9b90f5cdcf3aa2f6311bff1f371cd5f563ed2243c92e2ccb38fc1dac80f2b06c"} Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.502994 5017 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.523805 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\
\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.533463 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.533493 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.533501 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.533514 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.533522 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:37Z","lastTransitionTime":"2026-01-22T14:03:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.539855 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.563972 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.585259 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.599716 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.613568 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.636369 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.636413 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.636423 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.636437 5017 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.636447 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:37Z","lastTransitionTime":"2026-01-22T14:03:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.637901 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\
",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.652479 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.666012 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.678451 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.687802 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.704107 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b90f5cdcf3aa2f6311bff1f371cd5f563ed2243c92e2ccb38fc1dac80f2b06c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fd031af875c811a624370aa334b1dcb52707761008cfbd14ced8d09be91d7c5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:35Z\\\",\\\"message\\\":\\\"391 6316 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 14:03:35.599426 6316 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 14:03:35.599403 6316 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0122 14:03:35.599463 6316 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 14:03:35.599471 6316 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 14:03:35.599485 6316 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 14:03:35.599506 6316 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 14:03:35.599513 6316 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0122 14:03:35.599516 6316 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 14:03:35.599528 6316 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 14:03:35.599547 6316 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 14:03:35.599556 6316 factory.go:656] Stopping watch factory\\\\nI0122 14:03:35.599558 6316 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 14:03:35.599584 6316 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.716497 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.732340 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.739278 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.739323 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:37 crc 
kubenswrapper[5017]: I0122 14:03:37.739341 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.739360 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.739374 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:37Z","lastTransitionTime":"2026-01-22T14:03:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.745799 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/va
r/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.842136 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.842186 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.842196 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.842211 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.842224 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:37Z","lastTransitionTime":"2026-01-22T14:03:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.944679 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.944744 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.944757 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.944774 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:37 crc kubenswrapper[5017]: I0122 14:03:37.944785 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:37Z","lastTransitionTime":"2026-01-22T14:03:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.046691 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.046726 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.046736 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.046752 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.046763 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:38Z","lastTransitionTime":"2026-01-22T14:03:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.060255 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd"] Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.060745 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.062923 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.065429 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.074035 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.085808 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.099262 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.111317 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.122224 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.139150 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655
ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.148071 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7c8n\" (UniqueName: \"kubernetes.io/projected/5f96871c-5cdd-496a-9c9b-b669151acffd-kube-api-access-x7c8n\") pod \"ovnkube-control-plane-749d76644c-hvbsd\" (UID: \"5f96871c-5cdd-496a-9c9b-b669151acffd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.148167 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5f96871c-5cdd-496a-9c9b-b669151acffd-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hvbsd\" (UID: \"5f96871c-5cdd-496a-9c9b-b669151acffd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.148197 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5f96871c-5cdd-496a-9c9b-b669151acffd-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hvbsd\" (UID: \"5f96871c-5cdd-496a-9c9b-b669151acffd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.148218 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5f96871c-5cdd-496a-9c9b-b669151acffd-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hvbsd\" (UID: \"5f96871c-5cdd-496a-9c9b-b669151acffd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.149269 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 
14:03:38.149301 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.149313 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.149340 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.149350 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:38Z","lastTransitionTime":"2026-01-22T14:03:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.152910 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.165015 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.174960 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.184221 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.200236 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b90f5cdcf3aa2f6311bff1f371cd5f563ed2243
c92e2ccb38fc1dac80f2b06c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fd031af875c811a624370aa334b1dcb52707761008cfbd14ced8d09be91d7c5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:35Z\\\",\\\"message\\\":\\\"391 6316 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 14:03:35.599426 6316 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 14:03:35.599403 6316 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0122 14:03:35.599463 6316 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 14:03:35.599471 6316 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 14:03:35.599485 6316 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 14:03:35.599506 6316 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 14:03:35.599513 6316 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0122 14:03:35.599516 6316 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 14:03:35.599528 6316 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 14:03:35.599547 6316 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 14:03:35.599556 6316 factory.go:656] Stopping watch factory\\\\nI0122 14:03:35.599558 6316 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 14:03:35.599584 6316 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.212796 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f96871c-5cdd-496a-9c9b-b669151acffd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvbsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.228840 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 20:14:49.888402639 +0000 UTC Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.229384 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.241515 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.249441 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5f96871c-5cdd-496a-9c9b-b669151acffd-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hvbsd\" (UID: \"5f96871c-5cdd-496a-9c9b-b669151acffd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.249491 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5f96871c-5cdd-496a-9c9b-b669151acffd-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hvbsd\" (UID: \"5f96871c-5cdd-496a-9c9b-b669151acffd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.249516 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5f96871c-5cdd-496a-9c9b-b669151acffd-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hvbsd\" (UID: \"5f96871c-5cdd-496a-9c9b-b669151acffd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.249569 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7c8n\" (UniqueName: \"kubernetes.io/projected/5f96871c-5cdd-496a-9c9b-b669151acffd-kube-api-access-x7c8n\") pod \"ovnkube-control-plane-749d76644c-hvbsd\" (UID: \"5f96871c-5cdd-496a-9c9b-b669151acffd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.250190 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5f96871c-5cdd-496a-9c9b-b669151acffd-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hvbsd\" (UID: \"5f96871c-5cdd-496a-9c9b-b669151acffd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 
14:03:38.250684 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5f96871c-5cdd-496a-9c9b-b669151acffd-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hvbsd\" (UID: \"5f96871c-5cdd-496a-9c9b-b669151acffd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.251487 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.251630 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.251746 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.251832 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.251915 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:38Z","lastTransitionTime":"2026-01-22T14:03:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.252706 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-releas
e-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.256205 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5f96871c-5cdd-496a-9c9b-b669151acffd-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hvbsd\" (UID: \"5f96871c-5cdd-496a-9c9b-b669151acffd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.266012 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7c8n\" (UniqueName: \"kubernetes.io/projected/5f96871c-5cdd-496a-9c9b-b669151acffd-kube-api-access-x7c8n\") pod \"ovnkube-control-plane-749d76644c-hvbsd\" (UID: \"5f96871c-5cdd-496a-9c9b-b669151acffd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.274360 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.305724 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.305942 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:38 crc 
kubenswrapper[5017]: I0122 14:03:38.306028 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.306101 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.306189 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:38Z","lastTransitionTime":"2026-01-22T14:03:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:38 crc kubenswrapper[5017]: E0122 14:03:38.319968 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.324285 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.324396 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.324453 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.324508 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.324574 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:38Z","lastTransitionTime":"2026-01-22T14:03:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:38 crc kubenswrapper[5017]: E0122 14:03:38.337541 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.341390 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.341491 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.341557 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.341631 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.341688 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:38Z","lastTransitionTime":"2026-01-22T14:03:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:38 crc kubenswrapper[5017]: E0122 14:03:38.354623 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.357714 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.357799 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.357868 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.357926 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.357977 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:38Z","lastTransitionTime":"2026-01-22T14:03:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:38 crc kubenswrapper[5017]: E0122 14:03:38.371699 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.374512 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.376769 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.376792 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.376803 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.376819 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.376829 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:38Z","lastTransitionTime":"2026-01-22T14:03:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:38 crc kubenswrapper[5017]: W0122 14:03:38.390640 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5f96871c_5cdd_496a_9c9b_b669151acffd.slice/crio-9c18d622e0ab7f2aef6313feae794f8b5c333852299d31b0ccfcc0294a742802 WatchSource:0}: Error finding container 9c18d622e0ab7f2aef6313feae794f8b5c333852299d31b0ccfcc0294a742802: Status 404 returned error can't find the container with id 9c18d622e0ab7f2aef6313feae794f8b5c333852299d31b0ccfcc0294a742802 Jan 22 14:03:38 crc kubenswrapper[5017]: E0122 14:03:38.393979 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed616
3a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: E0122 14:03:38.394149 5017 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.395482 5017 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.395512 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.395523 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.395538 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.395549 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:38Z","lastTransitionTime":"2026-01-22T14:03:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.498218 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.498254 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.498263 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.498277 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.498286 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:38Z","lastTransitionTime":"2026-01-22T14:03:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.509479 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vgtf7_43c64840-5206-4dec-834e-77e0562f19de/ovnkube-controller/1.log" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.510987 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vgtf7_43c64840-5206-4dec-834e-77e0562f19de/ovnkube-controller/0.log" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.514439 5017 generic.go:334] "Generic (PLEG): container finished" podID="43c64840-5206-4dec-834e-77e0562f19de" containerID="9b90f5cdcf3aa2f6311bff1f371cd5f563ed2243c92e2ccb38fc1dac80f2b06c" exitCode=1 Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.514513 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerDied","Data":"9b90f5cdcf3aa2f6311bff1f371cd5f563ed2243c92e2ccb38fc1dac80f2b06c"} Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.514590 5017 scope.go:117] "RemoveContainer" containerID="0fd031af875c811a624370aa334b1dcb52707761008cfbd14ced8d09be91d7c5" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.515418 5017 scope.go:117] "RemoveContainer" containerID="9b90f5cdcf3aa2f6311bff1f371cd5f563ed2243c92e2ccb38fc1dac80f2b06c" Jan 22 14:03:38 crc kubenswrapper[5017]: E0122 14:03:38.515582 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-vgtf7_openshift-ovn-kubernetes(43c64840-5206-4dec-834e-77e0562f19de)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" podUID="43c64840-5206-4dec-834e-77e0562f19de" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.517432 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" event={"ID":"5f96871c-5cdd-496a-9c9b-b669151acffd","Type":"ContainerStarted","Data":"9c18d622e0ab7f2aef6313feae794f8b5c333852299d31b0ccfcc0294a742802"} Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.529220 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.541249 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.560641 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/op
enshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441e
cd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.575250 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.588603 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.600407 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.600461 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.600472 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.600493 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.600506 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:38Z","lastTransitionTime":"2026-01-22T14:03:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.601912 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.616337 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.628356 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.638783 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.652434 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.664397 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.674425 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.692628 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b90f5cdcf3aa2f6311bff1f371cd5f563ed2243c92e2ccb38fc1dac80f2b06c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fd031af875c811a624370aa334b1dcb52707761008cfbd14ced8d09be91d7c5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:35Z\\\",\\\"message\\\":\\\"391 6316 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 14:03:35.599426 6316 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 14:03:35.599403 6316 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0122 14:03:35.599463 6316 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 14:03:35.599471 6316 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 14:03:35.599485 6316 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 14:03:35.599506 6316 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 14:03:35.599513 6316 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0122 14:03:35.599516 6316 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 14:03:35.599528 6316 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 14:03:35.599547 6316 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 14:03:35.599556 6316 factory.go:656] Stopping watch factory\\\\nI0122 14:03:35.599558 6316 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 14:03:35.599584 6316 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b90f5cdcf3aa2f6311bff1f371cd5f563ed2243c92e2ccb38fc1dac80f2b06c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:37Z\\\",\\\"message\\\":\\\"led to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z]\\\\nI0122 14:03:37.199963 6464 services_controller.go:434] Service openshift-dns-operator/metrics retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{metrics openshift-dns-operator 4bf7a6e2-037e-4e09-ad6b-2e7f1059a532 4106 0 2025-02-23 05:12:23 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[name:dns-operator] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:metrics-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc007391ad7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9393,TargetPort:{1 0 metrics},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{name: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.703369 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.703425 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.703441 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.703464 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.703482 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:38Z","lastTransitionTime":"2026-01-22T14:03:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.704348 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f96871c-5cdd-496a-9c9b-b669151acffd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvbsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.717723 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.730818 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed 
to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1
688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\"
:\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:38Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.806126 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.806169 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:38 crc 
kubenswrapper[5017]: I0122 14:03:38.806182 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.806199 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.806212 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:38Z","lastTransitionTime":"2026-01-22T14:03:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.909292 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.909335 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.909347 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.909362 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:38 crc kubenswrapper[5017]: I0122 14:03:38.909370 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:38Z","lastTransitionTime":"2026-01-22T14:03:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.011708 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.011758 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.011770 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.011788 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.011800 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:39Z","lastTransitionTime":"2026-01-22T14:03:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.113615 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.113662 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.113673 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.113690 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.113701 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:39Z","lastTransitionTime":"2026-01-22T14:03:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.216787 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.216832 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.216843 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.216863 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.216878 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:39Z","lastTransitionTime":"2026-01-22T14:03:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.229445 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 11:04:56.034823502 +0000 UTC Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.258310 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:39 crc kubenswrapper[5017]: E0122 14:03:39.258449 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.258543 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:39 crc kubenswrapper[5017]: E0122 14:03:39.258668 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.258931 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:39 crc kubenswrapper[5017]: E0122 14:03:39.259001 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.319324 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.319355 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.319364 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.319378 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.319388 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:39Z","lastTransitionTime":"2026-01-22T14:03:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.421381 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.421415 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.421425 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.421442 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.421454 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:39Z","lastTransitionTime":"2026-01-22T14:03:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.521795 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vgtf7_43c64840-5206-4dec-834e-77e0562f19de/ovnkube-controller/1.log" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.522989 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.523021 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.523031 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.523045 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.523057 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:39Z","lastTransitionTime":"2026-01-22T14:03:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.525796 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" event={"ID":"5f96871c-5cdd-496a-9c9b-b669151acffd","Type":"ContainerStarted","Data":"3b45106549b9b14cf3935ae87a553a327fc77b924bd793b38a90d7b6ce9725f4"} Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.525827 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" event={"ID":"5f96871c-5cdd-496a-9c9b-b669151acffd","Type":"ContainerStarted","Data":"44afb8e08394c51727ad8de33a09f37a7dfef401310befe3d65553362af85562"} Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.538127 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\
"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2ee
ade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":
\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.548540 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.559732 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\
"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.570522 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"
mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.583080 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.596368 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.608004 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.618394 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.625327 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.625379 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.625391 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.625410 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.625424 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:39Z","lastTransitionTime":"2026-01-22T14:03:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.642049 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.653703 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.665299 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.677033 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.687988 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.707371 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b90f5cdcf3aa2f6311bff1f371cd5f563ed2243c92e2ccb38fc1dac80f2b06c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fd031af875c811a624370aa334b1dcb52707761008cfbd14ced8d09be91d7c5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:35Z\\\",\\\"message\\\":\\\"391 6316 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 14:03:35.599426 6316 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 14:03:35.599403 6316 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0122 14:03:35.599463 6316 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 14:03:35.599471 6316 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 14:03:35.599485 6316 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 14:03:35.599506 6316 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 14:03:35.599513 6316 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0122 14:03:35.599516 6316 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 14:03:35.599528 6316 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 14:03:35.599547 6316 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 14:03:35.599556 6316 factory.go:656] Stopping watch factory\\\\nI0122 14:03:35.599558 6316 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 14:03:35.599584 6316 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b90f5cdcf3aa2f6311bff1f371cd5f563ed2243c92e2ccb38fc1dac80f2b06c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:37Z\\\",\\\"message\\\":\\\"led to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z]\\\\nI0122 14:03:37.199963 6464 services_controller.go:434] Service openshift-dns-operator/metrics retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{metrics openshift-dns-operator 4bf7a6e2-037e-4e09-ad6b-2e7f1059a532 4106 0 2025-02-23 05:12:23 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[name:dns-operator] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:metrics-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc007391ad7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9393,TargetPort:{1 0 metrics},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{name: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.717591 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f96871c-5cdd-496a-9c9b-b669151acffd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44afb8e08394c51727ad8de33a09f37a7dfef401310befe3d65553362af85562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b45106549b9b14cf3935ae87a553a327fc77b924bd793b38a90d7b6ce9725f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvbsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.727914 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.727943 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.727951 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.727963 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.727973 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:39Z","lastTransitionTime":"2026-01-22T14:03:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.733579 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.830036 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.830072 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.830081 5017 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.830095 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.830106 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:39Z","lastTransitionTime":"2026-01-22T14:03:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.881986 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-gc29v"] Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.882533 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:03:39 crc kubenswrapper[5017]: E0122 14:03:39.882603 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.901015 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\
"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"fini
shedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616
e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.914676 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.933315 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.933366 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.933376 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.933391 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.933401 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:39Z","lastTransitionTime":"2026-01-22T14:03:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.940189 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.965479 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs\") pod \"network-metrics-daemon-gc29v\" (UID: \"bca74e01-1d78-4eeb-b3db-842fda0babd7\") " pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.965516 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnfsn\" (UniqueName: \"kubernetes.io/projected/bca74e01-1d78-4eeb-b3db-842fda0babd7-kube-api-access-wnfsn\") pod \"network-metrics-daemon-gc29v\" (UID: \"bca74e01-1d78-4eeb-b3db-842fda0babd7\") " pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.973690 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce75
07f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.985859 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gc29v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bca74e01-1d78-4eeb-b3db-842fda0babd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gc29v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:39 crc kubenswrapper[5017]: I0122 14:03:39.999254 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.010894 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:40Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.024407 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:40Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.035329 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:40Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.036256 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.036295 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.036307 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.036322 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.036333 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:40Z","lastTransitionTime":"2026-01-22T14:03:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.054000 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:40Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.065853 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:40Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.066152 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs\") pod \"network-metrics-daemon-gc29v\" (UID: \"bca74e01-1d78-4eeb-b3db-842fda0babd7\") " pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.066190 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnfsn\" (UniqueName: \"kubernetes.io/projected/bca74e01-1d78-4eeb-b3db-842fda0babd7-kube-api-access-wnfsn\") pod \"network-metrics-daemon-gc29v\" (UID: \"bca74e01-1d78-4eeb-b3db-842fda0babd7\") " pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:03:40 crc kubenswrapper[5017]: E0122 14:03:40.066366 5017 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:03:40 crc kubenswrapper[5017]: E0122 14:03:40.066434 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs podName:bca74e01-1d78-4eeb-b3db-842fda0babd7 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:40.566413846 +0000 UTC m=+35.558875774 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs") pod "network-metrics-daemon-gc29v" (UID: "bca74e01-1d78-4eeb-b3db-842fda0babd7") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.079512 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:40Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.082317 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnfsn\" (UniqueName: \"kubernetes.io/projected/bca74e01-1d78-4eeb-b3db-842fda0babd7-kube-api-access-wnfsn\") pod \"network-metrics-daemon-gc29v\" (UID: \"bca74e01-1d78-4eeb-b3db-842fda0babd7\") " pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.092599 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:40Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.104055 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:40Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.125493 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b90f5cdcf3aa2f6311bff1f371cd5f563ed2243c92e2ccb38fc1dac80f2b06c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fd031af875c811a624370aa334b1dcb52707761008cfbd14ced8d09be91d7c5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:35Z\\\",\\\"message\\\":\\\"391 6316 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 14:03:35.599426 6316 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 14:03:35.599403 6316 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0122 14:03:35.599463 6316 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 14:03:35.599471 6316 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 14:03:35.599485 6316 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 14:03:35.599506 6316 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 14:03:35.599513 6316 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0122 14:03:35.599516 6316 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 14:03:35.599528 6316 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 14:03:35.599547 6316 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 14:03:35.599556 6316 factory.go:656] Stopping watch factory\\\\nI0122 14:03:35.599558 6316 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 14:03:35.599584 6316 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b90f5cdcf3aa2f6311bff1f371cd5f563ed2243c92e2ccb38fc1dac80f2b06c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:37Z\\\",\\\"message\\\":\\\"led to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z]\\\\nI0122 14:03:37.199963 6464 services_controller.go:434] Service openshift-dns-operator/metrics retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{metrics openshift-dns-operator 4bf7a6e2-037e-4e09-ad6b-2e7f1059a532 4106 0 2025-02-23 05:12:23 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[name:dns-operator] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true 
service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:metrics-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc007391ad7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9393,TargetPort:{1 0 metrics},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{name: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\
":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:40Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.139084 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.139136 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.139169 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.139188 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.139056 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f96871c-5cdd-496a-9c9b-b669151acffd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44afb8e08394c51727ad8de33a09f37a7dfef401310befe3d65553362af85562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b45106549b9b14cf3935ae87a553a327fc77b924bd793b38a90d7b6ce9725f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvbsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:40Z is after 2025-08-24T17:21:41Z" Jan 22 
14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.139199 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:40Z","lastTransitionTime":"2026-01-22T14:03:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.152797 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube
-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:40Z is after 
2025-08-24T17:21:41Z" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.166958 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.167061 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:40 crc kubenswrapper[5017]: E0122 14:03:40.167192 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:03:56.167165053 +0000 UTC m=+51.159626951 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:03:40 crc kubenswrapper[5017]: E0122 14:03:40.167211 5017 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:03:40 crc kubenswrapper[5017]: E0122 14:03:40.167268 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:56.167245965 +0000 UTC m=+51.159707823 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.230654 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 09:41:27.107227962 +0000 UTC Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.241421 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.241467 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.241476 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.241492 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.241500 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:40Z","lastTransitionTime":"2026-01-22T14:03:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.268629 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.268716 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:40 crc kubenswrapper[5017]: E0122 14:03:40.268866 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.268829 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:40 crc kubenswrapper[5017]: E0122 14:03:40.268896 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 
22 14:03:40 crc kubenswrapper[5017]: E0122 14:03:40.268942 5017 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:03:40 crc kubenswrapper[5017]: E0122 14:03:40.268974 5017 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:40 crc kubenswrapper[5017]: E0122 14:03:40.268975 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:03:40 crc kubenswrapper[5017]: E0122 14:03:40.269042 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:56.269013392 +0000 UTC m=+51.261475280 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:03:40 crc kubenswrapper[5017]: E0122 14:03:40.269044 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:03:40 crc kubenswrapper[5017]: E0122 14:03:40.269081 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:56.269063093 +0000 UTC m=+51.261524991 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:40 crc kubenswrapper[5017]: E0122 14:03:40.269086 5017 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:40 crc kubenswrapper[5017]: E0122 14:03:40.269208 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:56.269189187 +0000 UTC m=+51.261651085 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.344090 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.344201 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.344222 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.344251 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.344272 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:40Z","lastTransitionTime":"2026-01-22T14:03:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.446873 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.447289 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.447302 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.447321 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.447333 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:40Z","lastTransitionTime":"2026-01-22T14:03:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.549457 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.549492 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.549503 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.549517 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.549527 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:40Z","lastTransitionTime":"2026-01-22T14:03:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.571515 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs\") pod \"network-metrics-daemon-gc29v\" (UID: \"bca74e01-1d78-4eeb-b3db-842fda0babd7\") " pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:03:40 crc kubenswrapper[5017]: E0122 14:03:40.571765 5017 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:03:40 crc kubenswrapper[5017]: E0122 14:03:40.571824 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs podName:bca74e01-1d78-4eeb-b3db-842fda0babd7 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:41.571810098 +0000 UTC m=+36.564271956 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs") pod "network-metrics-daemon-gc29v" (UID: "bca74e01-1d78-4eeb-b3db-842fda0babd7") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.652149 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.652196 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.652210 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.652229 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.652242 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:40Z","lastTransitionTime":"2026-01-22T14:03:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.754994 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.755061 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.755079 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.755097 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.755109 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:40Z","lastTransitionTime":"2026-01-22T14:03:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.858217 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.858271 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.858287 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.858311 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.858326 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:40Z","lastTransitionTime":"2026-01-22T14:03:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.962093 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.962158 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.962174 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.962193 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:40 crc kubenswrapper[5017]: I0122 14:03:40.962207 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:40Z","lastTransitionTime":"2026-01-22T14:03:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.064550 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.064590 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.064598 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.064614 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.064623 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:41Z","lastTransitionTime":"2026-01-22T14:03:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.167357 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.167411 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.167431 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.167457 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.167476 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:41Z","lastTransitionTime":"2026-01-22T14:03:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.231731 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 14:11:24.317062263 +0000 UTC Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.257313 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.257381 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.257313 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:41 crc kubenswrapper[5017]: E0122 14:03:41.257562 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:03:41 crc kubenswrapper[5017]: E0122 14:03:41.257606 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:03:41 crc kubenswrapper[5017]: E0122 14:03:41.257762 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.269566 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.269648 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.269679 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.269751 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.269775 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:41Z","lastTransitionTime":"2026-01-22T14:03:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.372337 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.372409 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.372427 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.372451 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.372467 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:41Z","lastTransitionTime":"2026-01-22T14:03:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.474774 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.474829 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.474838 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.474850 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.474860 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:41Z","lastTransitionTime":"2026-01-22T14:03:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.578446 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.578497 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.578520 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.578567 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.578592 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:41Z","lastTransitionTime":"2026-01-22T14:03:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.582046 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs\") pod \"network-metrics-daemon-gc29v\" (UID: \"bca74e01-1d78-4eeb-b3db-842fda0babd7\") " pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:03:41 crc kubenswrapper[5017]: E0122 14:03:41.582239 5017 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:03:41 crc kubenswrapper[5017]: E0122 14:03:41.582339 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs podName:bca74e01-1d78-4eeb-b3db-842fda0babd7 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:43.582310825 +0000 UTC m=+38.574772723 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs") pod "network-metrics-daemon-gc29v" (UID: "bca74e01-1d78-4eeb-b3db-842fda0babd7") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.681614 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.681652 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.681661 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.681675 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.681685 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:41Z","lastTransitionTime":"2026-01-22T14:03:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.784203 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.784252 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.784264 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.784279 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.784288 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:41Z","lastTransitionTime":"2026-01-22T14:03:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.886667 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.886709 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.886720 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.886735 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.886748 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:41Z","lastTransitionTime":"2026-01-22T14:03:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.988907 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.988950 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.988959 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.988976 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:41 crc kubenswrapper[5017]: I0122 14:03:41.988986 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:41Z","lastTransitionTime":"2026-01-22T14:03:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.091011 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.091082 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.091095 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.091140 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.091155 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:42Z","lastTransitionTime":"2026-01-22T14:03:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.193478 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.193521 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.193531 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.193545 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.193554 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:42Z","lastTransitionTime":"2026-01-22T14:03:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.232302 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 22:52:10.144120109 +0000 UTC Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.257550 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:03:42 crc kubenswrapper[5017]: E0122 14:03:42.257686 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.296584 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.296668 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.296692 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.296722 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.296741 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:42Z","lastTransitionTime":"2026-01-22T14:03:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.399929 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.399977 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.399989 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.400008 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.400026 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:42Z","lastTransitionTime":"2026-01-22T14:03:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.503305 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.503377 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.503391 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.503411 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.503423 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:42Z","lastTransitionTime":"2026-01-22T14:03:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.605947 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.606013 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.606032 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.606056 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.606075 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:42Z","lastTransitionTime":"2026-01-22T14:03:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.708904 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.708990 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.709004 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.709033 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.709050 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:42Z","lastTransitionTime":"2026-01-22T14:03:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.812305 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.812369 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.812388 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.812412 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.812430 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:42Z","lastTransitionTime":"2026-01-22T14:03:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.914893 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.914962 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.914980 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.915006 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:42 crc kubenswrapper[5017]: I0122 14:03:42.915024 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:42Z","lastTransitionTime":"2026-01-22T14:03:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.017452 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.017511 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.017522 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.017539 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.017551 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:43Z","lastTransitionTime":"2026-01-22T14:03:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.120565 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.120609 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.120622 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.120639 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.120652 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:43Z","lastTransitionTime":"2026-01-22T14:03:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.223964 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.224018 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.224030 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.224050 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.224063 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:43Z","lastTransitionTime":"2026-01-22T14:03:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.232648 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 17:00:58.317890334 +0000 UTC Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.257104 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.257197 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.257181 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:43 crc kubenswrapper[5017]: E0122 14:03:43.257366 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:03:43 crc kubenswrapper[5017]: E0122 14:03:43.257479 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:03:43 crc kubenswrapper[5017]: E0122 14:03:43.257641 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.326683 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.326756 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.326780 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.326810 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.326832 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:43Z","lastTransitionTime":"2026-01-22T14:03:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.429439 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.429490 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.429502 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.429521 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.429536 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:43Z","lastTransitionTime":"2026-01-22T14:03:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.533179 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.533238 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.533260 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.533292 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.533313 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:43Z","lastTransitionTime":"2026-01-22T14:03:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.605276 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs\") pod \"network-metrics-daemon-gc29v\" (UID: \"bca74e01-1d78-4eeb-b3db-842fda0babd7\") " pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:03:43 crc kubenswrapper[5017]: E0122 14:03:43.605460 5017 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:03:43 crc kubenswrapper[5017]: E0122 14:03:43.605566 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs podName:bca74e01-1d78-4eeb-b3db-842fda0babd7 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:47.605543932 +0000 UTC m=+42.598005800 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs") pod "network-metrics-daemon-gc29v" (UID: "bca74e01-1d78-4eeb-b3db-842fda0babd7") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.636383 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.636423 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.636435 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.636452 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.636466 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:43Z","lastTransitionTime":"2026-01-22T14:03:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.739233 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.739286 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.739302 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.739323 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.739333 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:43Z","lastTransitionTime":"2026-01-22T14:03:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.841660 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.841718 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.841744 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.841766 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.841782 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:43Z","lastTransitionTime":"2026-01-22T14:03:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.944104 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.944158 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.944167 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.944180 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:43 crc kubenswrapper[5017]: I0122 14:03:43.944189 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:43Z","lastTransitionTime":"2026-01-22T14:03:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.047567 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.047621 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.047635 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.047653 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.047665 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:44Z","lastTransitionTime":"2026-01-22T14:03:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.149800 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.149837 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.149845 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.149858 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.149867 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:44Z","lastTransitionTime":"2026-01-22T14:03:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.233013 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 08:20:44.133925451 +0000 UTC Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.252213 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.252263 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.252276 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.252293 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.252305 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:44Z","lastTransitionTime":"2026-01-22T14:03:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.256741 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:03:44 crc kubenswrapper[5017]: E0122 14:03:44.257000 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.355255 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.355302 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.355312 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.355330 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.355339 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:44Z","lastTransitionTime":"2026-01-22T14:03:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.458017 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.458075 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.458092 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.458132 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.458145 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:44Z","lastTransitionTime":"2026-01-22T14:03:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.560714 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.560757 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.560774 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.560789 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.560800 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:44Z","lastTransitionTime":"2026-01-22T14:03:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.662696 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.662733 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.662745 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.662762 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.662774 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:44Z","lastTransitionTime":"2026-01-22T14:03:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.765188 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.765232 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.765241 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.765259 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.765269 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:44Z","lastTransitionTime":"2026-01-22T14:03:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.867635 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.867681 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.867692 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.867707 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.867718 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:44Z","lastTransitionTime":"2026-01-22T14:03:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.970006 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.970067 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.970087 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.970141 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:44 crc kubenswrapper[5017]: I0122 14:03:44.970161 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:44Z","lastTransitionTime":"2026-01-22T14:03:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.073482 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.073532 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.073543 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.073561 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.073572 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:45Z","lastTransitionTime":"2026-01-22T14:03:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.176424 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.176464 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.176473 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.176486 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.176497 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:45Z","lastTransitionTime":"2026-01-22T14:03:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.233901 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 02:25:59.647112161 +0000 UTC Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.257320 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.257381 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:45 crc kubenswrapper[5017]: E0122 14:03:45.257474 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:03:45 crc kubenswrapper[5017]: E0122 14:03:45.257773 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.257942 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:45 crc kubenswrapper[5017]: E0122 14:03:45.258078 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.272323 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.278325 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.278387 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.278397 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.278418 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.278429 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:45Z","lastTransitionTime":"2026-01-22T14:03:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.286433 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState
\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"n
ame\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt
/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.296609 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running
\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.308058 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gc29v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bca74e01-1d78-4eeb-b3db-842fda0babd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gc29v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.320904 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.332008 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.348697 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.370014 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655
ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.381671 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.381715 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.381725 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.381741 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.381754 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:45Z","lastTransitionTime":"2026-01-22T14:03:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.384848 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.398258 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.410341 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.420951 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.442137 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b90f5cdcf3aa2f6311bff1f371cd5f563ed2243c92e2ccb38fc1dac80f2b06c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fd031af875c811a624370aa334b1dcb52707761008cfbd14ced8d09be91d7c5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:35Z\\\",\\\"message\\\":\\\"391 6316 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 14:03:35.599426 6316 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 14:03:35.599403 6316 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0122 14:03:35.599463 6316 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 14:03:35.599471 6316 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 14:03:35.599485 6316 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 14:03:35.599506 6316 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 14:03:35.599513 6316 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0122 14:03:35.599516 6316 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 14:03:35.599528 6316 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 14:03:35.599547 6316 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 14:03:35.599556 6316 factory.go:656] Stopping watch factory\\\\nI0122 14:03:35.599558 6316 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 14:03:35.599584 6316 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b90f5cdcf3aa2f6311bff1f371cd5f563ed2243c92e2ccb38fc1dac80f2b06c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:37Z\\\",\\\"message\\\":\\\"led to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z]\\\\nI0122 14:03:37.199963 6464 services_controller.go:434] Service openshift-dns-operator/metrics retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{metrics openshift-dns-operator 4bf7a6e2-037e-4e09-ad6b-2e7f1059a532 4106 0 2025-02-23 05:12:23 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[name:dns-operator] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:metrics-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc007391ad7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9393,TargetPort:{1 0 metrics},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{name: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.454574 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f96871c-5cdd-496a-9c9b-b669151acffd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44afb8e08394c51727ad8de33a09f37a7dfef401310befe3d65553362af85562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b45106549b9b14cf3935ae87a553a327fc77b924bd793b38a90d7b6ce9725f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvbsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.467342 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.479345 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.483339 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.483450 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.483462 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.483477 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.483487 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:45Z","lastTransitionTime":"2026-01-22T14:03:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.490821 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.586289 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.586361 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.586373 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.586393 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.586405 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:45Z","lastTransitionTime":"2026-01-22T14:03:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration 
file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.688440 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.688482 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.688491 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.688507 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.688517 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:45Z","lastTransitionTime":"2026-01-22T14:03:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.790718 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.790764 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.790773 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.790790 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.790800 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:45Z","lastTransitionTime":"2026-01-22T14:03:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.893418 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.893458 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.893467 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.893482 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.893492 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:45Z","lastTransitionTime":"2026-01-22T14:03:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.995944 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.995985 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.995994 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.996012 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:45 crc kubenswrapper[5017]: I0122 14:03:45.996024 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:45Z","lastTransitionTime":"2026-01-22T14:03:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.098980 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.099072 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.099090 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.099164 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.099197 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:46Z","lastTransitionTime":"2026-01-22T14:03:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.202068 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.202109 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.202142 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.202159 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.202172 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:46Z","lastTransitionTime":"2026-01-22T14:03:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.234541 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 09:00:37.529488099 +0000 UTC Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.257045 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:03:46 crc kubenswrapper[5017]: E0122 14:03:46.257657 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.304021 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.304065 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.304074 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.304099 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.304142 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:46Z","lastTransitionTime":"2026-01-22T14:03:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.406774 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.406819 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.406827 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.406842 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.406851 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:46Z","lastTransitionTime":"2026-01-22T14:03:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.509244 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.509288 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.509296 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.509313 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.509322 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:46Z","lastTransitionTime":"2026-01-22T14:03:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.611585 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.611895 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.612156 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.612378 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.612568 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:46Z","lastTransitionTime":"2026-01-22T14:03:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.715545 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.715585 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.715596 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.715613 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.715626 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:46Z","lastTransitionTime":"2026-01-22T14:03:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.818223 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.818289 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.818306 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.818335 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.818363 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:46Z","lastTransitionTime":"2026-01-22T14:03:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.922309 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.922363 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.922376 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.922395 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:46 crc kubenswrapper[5017]: I0122 14:03:46.922405 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:46Z","lastTransitionTime":"2026-01-22T14:03:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.025336 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.025388 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.025400 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.025416 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.025426 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:47Z","lastTransitionTime":"2026-01-22T14:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.128298 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.128351 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.128360 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.128379 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.128390 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:47Z","lastTransitionTime":"2026-01-22T14:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.230769 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.230814 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.230825 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.230841 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.230853 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:47Z","lastTransitionTime":"2026-01-22T14:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.234982 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 01:41:58.968002774 +0000 UTC Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.258262 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.258303 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:47 crc kubenswrapper[5017]: E0122 14:03:47.258450 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.258471 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:47 crc kubenswrapper[5017]: E0122 14:03:47.258597 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:03:47 crc kubenswrapper[5017]: E0122 14:03:47.258746 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.333940 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.333989 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.334001 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.334016 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.334028 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:47Z","lastTransitionTime":"2026-01-22T14:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.437198 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.437233 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.437241 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.437257 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.437266 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:47Z","lastTransitionTime":"2026-01-22T14:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.540303 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.540344 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.540355 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.540369 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.540380 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:47Z","lastTransitionTime":"2026-01-22T14:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.643051 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.643101 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.643156 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.643182 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.643199 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:47Z","lastTransitionTime":"2026-01-22T14:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.646984 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs\") pod \"network-metrics-daemon-gc29v\" (UID: \"bca74e01-1d78-4eeb-b3db-842fda0babd7\") " pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:03:47 crc kubenswrapper[5017]: E0122 14:03:47.647163 5017 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:03:47 crc kubenswrapper[5017]: E0122 14:03:47.647260 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs podName:bca74e01-1d78-4eeb-b3db-842fda0babd7 nodeName:}" failed. No retries permitted until 2026-01-22 14:03:55.647236483 +0000 UTC m=+50.639698411 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs") pod "network-metrics-daemon-gc29v" (UID: "bca74e01-1d78-4eeb-b3db-842fda0babd7") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.745713 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.745753 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.745776 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.745789 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.745798 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:47Z","lastTransitionTime":"2026-01-22T14:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.847945 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.847989 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.847999 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.848018 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.848028 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:47Z","lastTransitionTime":"2026-01-22T14:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.950394 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.950469 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.950486 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.950510 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:47 crc kubenswrapper[5017]: I0122 14:03:47.950527 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:47Z","lastTransitionTime":"2026-01-22T14:03:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.052436 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.052494 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.052511 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.052550 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.052571 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:48Z","lastTransitionTime":"2026-01-22T14:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.154778 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.154839 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.154864 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.154898 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.154923 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:48Z","lastTransitionTime":"2026-01-22T14:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.235631 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 12:00:57.098328038 +0000 UTC Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.256766 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:03:48 crc kubenswrapper[5017]: E0122 14:03:48.256914 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.257622 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.257649 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.257659 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.257674 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.257684 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:48Z","lastTransitionTime":"2026-01-22T14:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.360709 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.360754 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.360768 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.360784 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.360798 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:48Z","lastTransitionTime":"2026-01-22T14:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.463260 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.463316 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.463333 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.463353 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.463369 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:48Z","lastTransitionTime":"2026-01-22T14:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.565587 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.565629 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.565640 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.565656 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.565667 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:48Z","lastTransitionTime":"2026-01-22T14:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.667500 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.667570 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.667595 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.667625 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.667648 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:48Z","lastTransitionTime":"2026-01-22T14:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.673028 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.673097 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.673108 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.673150 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.673165 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:48Z","lastTransitionTime":"2026-01-22T14:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:48 crc kubenswrapper[5017]: E0122 14:03:48.687062 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:48Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.692153 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.692215 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.692232 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.692255 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.692268 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:48Z","lastTransitionTime":"2026-01-22T14:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:48 crc kubenswrapper[5017]: E0122 14:03:48.706033 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:48Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.709664 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.709725 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.709735 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.709770 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.709781 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:48Z","lastTransitionTime":"2026-01-22T14:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:48 crc kubenswrapper[5017]: E0122 14:03:48.723392 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:48Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.727463 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.727534 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.727552 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.727575 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.727596 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:48Z","lastTransitionTime":"2026-01-22T14:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:48 crc kubenswrapper[5017]: E0122 14:03:48.744546 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:48Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.750083 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.750142 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.750161 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.750187 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.750205 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:48Z","lastTransitionTime":"2026-01-22T14:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:48 crc kubenswrapper[5017]: E0122 14:03:48.765443 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:48Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:48 crc kubenswrapper[5017]: E0122 14:03:48.765615 5017 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.769919 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.769948 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.769960 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.769983 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.770003 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:48Z","lastTransitionTime":"2026-01-22T14:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.873988 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.874042 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.874064 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.874089 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.874106 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:48Z","lastTransitionTime":"2026-01-22T14:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.977637 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.977699 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.977758 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.977785 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:48 crc kubenswrapper[5017]: I0122 14:03:48.977802 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:48Z","lastTransitionTime":"2026-01-22T14:03:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.081292 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.081341 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.081352 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.081375 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.081389 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:49Z","lastTransitionTime":"2026-01-22T14:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.098572 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.099694 5017 scope.go:117] "RemoveContainer" containerID="9b90f5cdcf3aa2f6311bff1f371cd5f563ed2243c92e2ccb38fc1dac80f2b06c" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.116220 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\"
:true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.138516 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b90f5cdcf3aa2f6311bff1f371cd5f563ed2243
c92e2ccb38fc1dac80f2b06c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b90f5cdcf3aa2f6311bff1f371cd5f563ed2243c92e2ccb38fc1dac80f2b06c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:37Z\\\",\\\"message\\\":\\\"led to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z]\\\\nI0122 14:03:37.199963 6464 services_controller.go:434] Service openshift-dns-operator/metrics retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{metrics openshift-dns-operator 4bf7a6e2-037e-4e09-ad6b-2e7f1059a532 4106 0 2025-02-23 05:12:23 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[name:dns-operator] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:metrics-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc007391ad7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9393,TargetPort:{1 0 metrics},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{name: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vgtf7_openshift-ovn-kubernetes(43c64840-5206-4dec-834e-77e0562f19de)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.153408 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f96871c-5cdd-496a-9c9b-b669151acffd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44afb8e08394c51727ad8de33a09f37a7dfef401310befe3d65553362af85562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b45106549b9b14cf3935ae87a553a327fc77b924bd793b38a90d7b6ce9725f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvbsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.168395 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.181456 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.183730 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.183833 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.183893 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.183964 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.184020 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:49Z","lastTransitionTime":"2026-01-22T14:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.194592 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.208846 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.223288 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.235916 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 23:24:32.267159612 +0000 UTC Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.239983 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.254151 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gc29v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bca74e01-1d78-4eeb-b3db-842fda0babd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gc29v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.256935 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.256973 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.256983 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:49 crc kubenswrapper[5017]: E0122 14:03:49.257261 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:03:49 crc kubenswrapper[5017]: E0122 14:03:49.257351 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:03:49 crc kubenswrapper[5017]: E0122 14:03:49.257401 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.269592 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.281718 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.286020 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.286054 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.286066 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.286079 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.286091 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:49Z","lastTransitionTime":"2026-01-22T14:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.291969 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\
",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.311598 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",
\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.325969 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.336941 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.350627 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.388781 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.388830 5017 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.388844 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.388861 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.388875 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:49Z","lastTransitionTime":"2026-01-22T14:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.491474 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.491516 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.491526 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.491541 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.491552 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:49Z","lastTransitionTime":"2026-01-22T14:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.557569 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vgtf7_43c64840-5206-4dec-834e-77e0562f19de/ovnkube-controller/1.log" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.562084 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerStarted","Data":"1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3"} Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.562797 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.577142 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":
\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.589271 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae
34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.593587 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.593663 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.593676 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.593701 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.593717 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:49Z","lastTransitionTime":"2026-01-22T14:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.601877 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gc29v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bca74e01-1d78-4eeb-b3db-842fda0babd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gc29v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.617909 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.635022 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.647536 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.657779 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.678450 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655
ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.691391 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.695637 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.695668 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.695680 5017 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.695697 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.695707 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:49Z","lastTransitionTime":"2026-01-22T14:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.703562 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.717027 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.731314 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.748551 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b90f5cdcf3aa2f6311bff1f371cd5f563ed2243c92e2ccb38fc1dac80f2b06c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:37Z\\\",\\\"message\\\":\\\"led to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z]\\\\nI0122 14:03:37.199963 6464 services_controller.go:434] Service openshift-dns-operator/metrics retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{metrics openshift-dns-operator 4bf7a6e2-037e-4e09-ad6b-2e7f1059a532 4106 0 2025-02-23 05:12:23 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[name:dns-operator] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:metrics-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc007391ad7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9393,TargetPort:{1 0 metrics},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{name: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.761507 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f96871c-5cdd-496a-9c9b-b669151acffd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44afb8e08394c51727ad8de33a09f37a7dfef401310befe3d65553362af85562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b45106549b9b14cf3935ae87a553a327fc77b924bd793b38a90d7b6ce9725f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvbsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 
14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.774915 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.789734 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.797785 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.797821 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:49 crc 
kubenswrapper[5017]: I0122 14:03:49.797831 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.797845 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.797855 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:49Z","lastTransitionTime":"2026-01-22T14:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.802036 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/va
r/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:49Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.900993 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.901338 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.901351 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.901367 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:49 crc kubenswrapper[5017]: I0122 14:03:49.901378 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:49Z","lastTransitionTime":"2026-01-22T14:03:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.004104 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.004156 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.004165 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.004178 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.004188 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:50Z","lastTransitionTime":"2026-01-22T14:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.105909 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.105966 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.106014 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.106030 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.106040 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:50Z","lastTransitionTime":"2026-01-22T14:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.208296 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.208331 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.208339 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.208352 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.208360 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:50Z","lastTransitionTime":"2026-01-22T14:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.236773 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 14:15:52.666884884 +0000 UTC Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.257309 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:03:50 crc kubenswrapper[5017]: E0122 14:03:50.257453 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.310553 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.310619 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.310629 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.310646 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.310656 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:50Z","lastTransitionTime":"2026-01-22T14:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.413964 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.414014 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.414030 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.414049 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.414061 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:50Z","lastTransitionTime":"2026-01-22T14:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.516885 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.517047 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.517068 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.517092 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.517143 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:50Z","lastTransitionTime":"2026-01-22T14:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.619823 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.619888 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.619902 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.619918 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.619931 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:50Z","lastTransitionTime":"2026-01-22T14:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.721719 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.721754 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.721780 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.721793 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.721801 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:50Z","lastTransitionTime":"2026-01-22T14:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.824551 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.824608 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.824619 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.824632 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.824642 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:50Z","lastTransitionTime":"2026-01-22T14:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.927137 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.927193 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.927208 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.927231 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:50 crc kubenswrapper[5017]: I0122 14:03:50.927244 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:50Z","lastTransitionTime":"2026-01-22T14:03:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.029885 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.029931 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.029940 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.029962 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.029973 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:51Z","lastTransitionTime":"2026-01-22T14:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.131970 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.132013 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.132024 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.132037 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.132046 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:51Z","lastTransitionTime":"2026-01-22T14:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.233892 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.233936 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.233955 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.233971 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.233982 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:51Z","lastTransitionTime":"2026-01-22T14:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.237380 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 11:18:08.753192454 +0000 UTC Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.257235 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.257296 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:51 crc kubenswrapper[5017]: E0122 14:03:51.257402 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.257235 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:51 crc kubenswrapper[5017]: E0122 14:03:51.257500 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:03:51 crc kubenswrapper[5017]: E0122 14:03:51.257568 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.336745 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.336775 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.336783 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.336795 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.336803 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:51Z","lastTransitionTime":"2026-01-22T14:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.439274 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.439327 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.439344 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.439368 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.439385 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:51Z","lastTransitionTime":"2026-01-22T14:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.542359 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.542400 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.542411 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.542429 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.542438 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:51Z","lastTransitionTime":"2026-01-22T14:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.571380 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vgtf7_43c64840-5206-4dec-834e-77e0562f19de/ovnkube-controller/2.log" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.572197 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vgtf7_43c64840-5206-4dec-834e-77e0562f19de/ovnkube-controller/1.log" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.575663 5017 generic.go:334] "Generic (PLEG): container finished" podID="43c64840-5206-4dec-834e-77e0562f19de" containerID="1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3" exitCode=1 Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.575716 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerDied","Data":"1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3"} Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.575924 5017 scope.go:117] "RemoveContainer" containerID="9b90f5cdcf3aa2f6311bff1f371cd5f563ed2243c92e2ccb38fc1dac80f2b06c" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.582390 5017 scope.go:117] "RemoveContainer" containerID="1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3" Jan 22 14:03:51 crc kubenswrapper[5017]: E0122 14:03:51.582682 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-vgtf7_openshift-ovn-kubernetes(43c64840-5206-4dec-834e-77e0562f19de)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" podUID="43c64840-5206-4dec-834e-77e0562f19de" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.598687 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:51Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.612286 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:51Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.624879 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gc29v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bca74e01-1d78-4eeb-b3db-842fda0babd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gc29v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-22T14:03:51Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.639893 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:51Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.644185 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.644227 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.644237 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.644253 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.644263 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:51Z","lastTransitionTime":"2026-01-22T14:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.656100 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:51Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.675748 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:51Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.691719 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:51Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.717820 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655
ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:51Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.732595 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:51Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.747001 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.747049 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.747063 5017 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.747083 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.747099 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:51Z","lastTransitionTime":"2026-01-22T14:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.751515 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:51Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.765804 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:51Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.780262 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:51Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.802707 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b90f5cdcf3aa2f6311bff1f371cd5f563ed2243c92e2ccb38fc1dac80f2b06c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:37Z\\\",\\\"message\\\":\\\"led to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z]\\\\nI0122 14:03:37.199963 6464 services_controller.go:434] Service openshift-dns-operator/metrics retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{metrics openshift-dns-operator 4bf7a6e2-037e-4e09-ad6b-2e7f1059a532 4106 0 2025-02-23 05:12:23 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[name:dns-operator] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:metrics-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc007391ad7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9393,TargetPort:{1 0 metrics},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{name: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:50Z\\\",\\\"message\\\":\\\"_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0122 14:03:49.845153 6671 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 14:03:49.844840 6671 services_controller.go:454] Service openshift-marketplace/marketplace-operator-metrics for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0122 14:03:49.845218 6671 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network 
co\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:51Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.817348 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f96871c-5cdd-496a-9c9b-b669151acffd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44afb8e08394c51727ad8de33a09f37a7dfef401310befe3d65553362af85562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnl
y\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b45106549b9b14cf3935ae87a553a327fc77b924bd793b38a90d7b6ce9725f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvbsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:51Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.831343 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:51Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.848310 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:51Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.849572 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.850166 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:51 crc 
kubenswrapper[5017]: I0122 14:03:51.850187 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.850203 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.850215 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:51Z","lastTransitionTime":"2026-01-22T14:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.860663 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/va
r/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:51Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.952895 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.952933 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.952945 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.952962 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:51 crc kubenswrapper[5017]: I0122 14:03:51.952974 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:51Z","lastTransitionTime":"2026-01-22T14:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.054862 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.054906 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.054942 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.054961 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.054972 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:52Z","lastTransitionTime":"2026-01-22T14:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.157696 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.157742 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.157750 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.157764 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.157774 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:52Z","lastTransitionTime":"2026-01-22T14:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.237917 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 11:59:00.155915688 +0000 UTC Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.257317 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:03:52 crc kubenswrapper[5017]: E0122 14:03:52.257452 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.259800 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.259836 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.259872 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.259886 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.259896 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:52Z","lastTransitionTime":"2026-01-22T14:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.362332 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.362375 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.362386 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.362402 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.362414 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:52Z","lastTransitionTime":"2026-01-22T14:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.464882 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.464929 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.464939 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.464956 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.464967 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:52Z","lastTransitionTime":"2026-01-22T14:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.566793 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.566842 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.566858 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.566875 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.566885 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:52Z","lastTransitionTime":"2026-01-22T14:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.581352 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vgtf7_43c64840-5206-4dec-834e-77e0562f19de/ovnkube-controller/2.log" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.669461 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.669508 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.669518 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.669536 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.669548 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:52Z","lastTransitionTime":"2026-01-22T14:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.772584 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.772657 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.772679 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.772708 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.772729 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:52Z","lastTransitionTime":"2026-01-22T14:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.875559 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.875628 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.875648 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.875673 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.875694 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:52Z","lastTransitionTime":"2026-01-22T14:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.979517 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.979598 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.979619 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.979686 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:52 crc kubenswrapper[5017]: I0122 14:03:52.979707 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:52Z","lastTransitionTime":"2026-01-22T14:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.083698 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.083752 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.083766 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.083789 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.083805 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:53Z","lastTransitionTime":"2026-01-22T14:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.186737 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.186811 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.186829 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.186854 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.186870 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:53Z","lastTransitionTime":"2026-01-22T14:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.238785 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 13:22:56.586118927 +0000 UTC Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.257491 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.257581 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.257674 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:53 crc kubenswrapper[5017]: E0122 14:03:53.257809 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:03:53 crc kubenswrapper[5017]: E0122 14:03:53.257973 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:03:53 crc kubenswrapper[5017]: E0122 14:03:53.258151 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.288953 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.289010 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.289019 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.289033 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.289042 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:53Z","lastTransitionTime":"2026-01-22T14:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.391783 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.391839 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.391856 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.391881 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.391898 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:53Z","lastTransitionTime":"2026-01-22T14:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.494779 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.494818 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.494829 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.494843 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.494853 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:53Z","lastTransitionTime":"2026-01-22T14:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.597415 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.597472 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.597482 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.597499 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.597510 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:53Z","lastTransitionTime":"2026-01-22T14:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.700562 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.700625 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.700641 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.700666 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.700685 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:53Z","lastTransitionTime":"2026-01-22T14:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.803399 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.803451 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.803464 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.803484 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.803497 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:53Z","lastTransitionTime":"2026-01-22T14:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.907022 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.907077 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.907087 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.907104 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:53 crc kubenswrapper[5017]: I0122 14:03:53.907146 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:53Z","lastTransitionTime":"2026-01-22T14:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.010202 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.010263 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.010275 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.010294 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.010306 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:54Z","lastTransitionTime":"2026-01-22T14:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.113885 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.113975 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.114012 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.114048 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.114073 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:54Z","lastTransitionTime":"2026-01-22T14:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.216224 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.216284 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.216293 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.216306 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.216316 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:54Z","lastTransitionTime":"2026-01-22T14:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.238925 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 06:27:58.158855109 +0000 UTC Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.257246 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:03:54 crc kubenswrapper[5017]: E0122 14:03:54.257418 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.318690 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.318749 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.318760 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.318773 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.318783 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:54Z","lastTransitionTime":"2026-01-22T14:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.420861 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.420915 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.420932 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.420954 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.420971 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:54Z","lastTransitionTime":"2026-01-22T14:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.523354 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.523389 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.523397 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.523411 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.523430 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:54Z","lastTransitionTime":"2026-01-22T14:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.626502 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.626574 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.626592 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.626620 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.626640 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:54Z","lastTransitionTime":"2026-01-22T14:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.729516 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.729588 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.729636 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.729655 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.729666 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:54Z","lastTransitionTime":"2026-01-22T14:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.833277 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.833336 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.833353 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.833375 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.833390 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:54Z","lastTransitionTime":"2026-01-22T14:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.936937 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.936980 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.936992 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.937010 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:54 crc kubenswrapper[5017]: I0122 14:03:54.937022 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:54Z","lastTransitionTime":"2026-01-22T14:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.039332 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.039382 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.039397 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.039417 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.039432 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:55Z","lastTransitionTime":"2026-01-22T14:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.142454 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.142494 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.142511 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.142528 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.142538 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:55Z","lastTransitionTime":"2026-01-22T14:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.239578 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 18:00:05.088883845 +0000 UTC Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.245905 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.246004 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.246029 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.246062 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.246084 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:55Z","lastTransitionTime":"2026-01-22T14:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.257671 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.257730 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:55 crc kubenswrapper[5017]: E0122 14:03:55.257858 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:03:55 crc kubenswrapper[5017]: E0122 14:03:55.258152 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.258538 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:55 crc kubenswrapper[5017]: E0122 14:03:55.258715 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.279979 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.295948 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gc29v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bca74e01-1d78-4eeb-b3db-842fda0babd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gc29v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.320655 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.348409 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.348480 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.348500 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.348533 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.348558 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:55Z","lastTransitionTime":"2026-01-22T14:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.348889 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.362962 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.391792 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655
ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.409195 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.427313 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.445268 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.451520 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.451569 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.451584 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.451603 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.451616 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:55Z","lastTransitionTime":"2026-01-22T14:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.463314 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.487406 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b90f5cdcf3aa2f6311bff1f371cd5f563ed2243c92e2ccb38fc1dac80f2b06c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:37Z\\\",\\\"message\\\":\\\"led to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z]\\\\nI0122 14:03:37.199963 6464 services_controller.go:434] Service openshift-dns-operator/metrics retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{metrics openshift-dns-operator 4bf7a6e2-037e-4e09-ad6b-2e7f1059a532 4106 0 2025-02-23 05:12:23 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[name:dns-operator] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:metrics-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc007391ad7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9393,TargetPort:{1 0 metrics},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{name: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:50Z\\\",\\\"message\\\":\\\"_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] 
creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0122 14:03:49.845153 6671 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 14:03:49.844840 6671 services_controller.go:454] Service openshift-marketplace/marketplace-operator-metrics for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0122 14:03:49.845218 6671 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network co\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-releas
e-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.502650 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f96871c-5cdd-496a-9c9b-b669151acffd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44afb8e08394c51727ad8de33a09f37a7dfef401310befe3d65553362af85562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b45106549b9b14cf3935ae87a553a327fc77b924bd793b38a90d7b6ce9725f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvbsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:55Z is after 2025-08-24T17:21:41Z" Jan 22 
14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.517952 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.532820 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.546223 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.554989 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.555020 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.555028 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.555044 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.555053 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:55Z","lastTransitionTime":"2026-01-22T14:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.559380 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.573882 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.658892 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.658945 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:55 crc 
kubenswrapper[5017]: I0122 14:03:55.658958 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.658987 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.659003 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:55Z","lastTransitionTime":"2026-01-22T14:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.733379 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs\") pod \"network-metrics-daemon-gc29v\" (UID: \"bca74e01-1d78-4eeb-b3db-842fda0babd7\") " pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:03:55 crc kubenswrapper[5017]: E0122 14:03:55.733680 5017 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:03:55 crc kubenswrapper[5017]: E0122 14:03:55.733907 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs podName:bca74e01-1d78-4eeb-b3db-842fda0babd7 nodeName:}" failed. No retries permitted until 2026-01-22 14:04:11.733845733 +0000 UTC m=+66.726307711 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs") pod "network-metrics-daemon-gc29v" (UID: "bca74e01-1d78-4eeb-b3db-842fda0babd7") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.762334 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.762384 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.762399 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.762422 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.762436 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:55Z","lastTransitionTime":"2026-01-22T14:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.865037 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.865333 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.865405 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.865468 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.865543 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:55Z","lastTransitionTime":"2026-01-22T14:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.968726 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.968782 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.968798 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.968821 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:55 crc kubenswrapper[5017]: I0122 14:03:55.968838 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:55Z","lastTransitionTime":"2026-01-22T14:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.072055 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.072092 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.072101 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.072130 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.072139 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:56Z","lastTransitionTime":"2026-01-22T14:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.174531 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.174572 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.174581 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.174595 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.174605 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:56Z","lastTransitionTime":"2026-01-22T14:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.238544 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:03:56 crc kubenswrapper[5017]: E0122 14:03:56.238705 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:04:28.238674735 +0000 UTC m=+83.231136603 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.238770 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:56 crc kubenswrapper[5017]: E0122 14:03:56.238905 5017 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:03:56 crc kubenswrapper[5017]: E0122 14:03:56.238967 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2026-01-22 14:04:28.238951723 +0000 UTC m=+83.231413591 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.240550 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 00:21:11.013874168 +0000 UTC Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.256827 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:03:56 crc kubenswrapper[5017]: E0122 14:03:56.256975 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.277706 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.277766 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.277788 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.277815 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.277832 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:56Z","lastTransitionTime":"2026-01-22T14:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.339930 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.340089 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.340201 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:56 crc kubenswrapper[5017]: E0122 14:03:56.340280 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:03:56 crc kubenswrapper[5017]: E0122 14:03:56.340345 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:03:56 crc kubenswrapper[5017]: E0122 14:03:56.340344 5017 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:03:56 crc kubenswrapper[5017]: E0122 14:03:56.340370 5017 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:56 crc kubenswrapper[5017]: E0122 14:03:56.340478 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 14:04:28.340440673 +0000 UTC m=+83.332902571 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:56 crc kubenswrapper[5017]: E0122 14:03:56.340523 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2026-01-22 14:04:28.340507965 +0000 UTC m=+83.332969863 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:03:56 crc kubenswrapper[5017]: E0122 14:03:56.340596 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:03:56 crc kubenswrapper[5017]: E0122 14:03:56.340637 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:03:56 crc kubenswrapper[5017]: E0122 14:03:56.340651 5017 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:56 crc kubenswrapper[5017]: E0122 14:03:56.340728 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 14:04:28.340706051 +0000 UTC m=+83.333167909 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.380459 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.380515 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.380530 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.380550 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.380567 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:56Z","lastTransitionTime":"2026-01-22T14:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.483812 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.483868 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.483881 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.483900 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.484191 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:56Z","lastTransitionTime":"2026-01-22T14:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.587440 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.587515 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.587533 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.587559 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.587577 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:56Z","lastTransitionTime":"2026-01-22T14:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.690232 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.690275 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.690286 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.690302 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.690313 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:56Z","lastTransitionTime":"2026-01-22T14:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.793386 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.793464 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.793483 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.793511 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.793531 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:56Z","lastTransitionTime":"2026-01-22T14:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.896570 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.896615 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.896624 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.896642 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.896732 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:56Z","lastTransitionTime":"2026-01-22T14:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.999757 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:56 crc kubenswrapper[5017]: I0122 14:03:56.999854 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:56.999880 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:56.999923 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:56.999949 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:56Z","lastTransitionTime":"2026-01-22T14:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.102780 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.102856 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.102881 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.102919 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.102949 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:57Z","lastTransitionTime":"2026-01-22T14:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.205524 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.205596 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.205606 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.205642 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.205653 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:57Z","lastTransitionTime":"2026-01-22T14:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.241294 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 05:21:43.465670721 +0000 UTC Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.257060 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.257165 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.257070 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:57 crc kubenswrapper[5017]: E0122 14:03:57.257388 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:03:57 crc kubenswrapper[5017]: E0122 14:03:57.257498 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:03:57 crc kubenswrapper[5017]: E0122 14:03:57.257648 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.308764 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.308813 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.308831 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.308858 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.308877 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:57Z","lastTransitionTime":"2026-01-22T14:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.411728 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.411760 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.411768 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.411780 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.411792 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:57Z","lastTransitionTime":"2026-01-22T14:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.453920 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.465245 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.469170 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:57Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.486827 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:57Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.501883 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:57Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.512688 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:57Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.514661 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.514712 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.514728 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.514753 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.514770 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:57Z","lastTransitionTime":"2026-01-22T14:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.523761 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gc29v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bca74e01-1d78-4eeb-b3db-842fda0babd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gc29v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:57Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.544554 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655
ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:57Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.561185 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:57Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.577674 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:57Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.590997 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:57Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.604594 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:57Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.617192 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.617241 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.617252 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.617302 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.617315 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:57Z","lastTransitionTime":"2026-01-22T14:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.622041 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f96871c-5cdd-496a-9c9b-b669151acffd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44afb8e08394c51727ad8de33a09f37a7dfef401310befe3d65553362af85562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b45106549b9b14cf3935ae87a553a327fc77b924bd793b38a90d7b6ce9725f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:38Z\\\"}}\" 
for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvbsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:57Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.636923 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"
lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:57Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.650084 5017 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d
3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:57Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.662830 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:57Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.676580 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:57Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.688314 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:57Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.707750 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b90f5cdcf3aa2f6311bff1f371cd5f563ed2243c92e2ccb38fc1dac80f2b06c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:37Z\\\",\\\"message\\\":\\\"led to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:37Z is after 2025-08-24T17:21:41Z]\\\\nI0122 14:03:37.199963 6464 services_controller.go:434] Service openshift-dns-operator/metrics retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{metrics openshift-dns-operator 4bf7a6e2-037e-4e09-ad6b-2e7f1059a532 4106 0 2025-02-23 05:12:23 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[name:dns-operator] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:metrics-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc007391ad7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9393,TargetPort:{1 0 metrics},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{name: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:50Z\\\",\\\"message\\\":\\\"_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0122 14:03:49.845153 6671 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 14:03:49.844840 6671 services_controller.go:454] Service openshift-marketplace/marketplace-operator-metrics for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0122 14:03:49.845218 6671 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network 
co\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:57Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.719507 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.719551 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.719561 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.719578 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.719587 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:57Z","lastTransitionTime":"2026-01-22T14:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.821894 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.821942 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.821956 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.821973 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.821985 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:57Z","lastTransitionTime":"2026-01-22T14:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.925417 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.925468 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.925483 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.925506 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:57 crc kubenswrapper[5017]: I0122 14:03:57.925518 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:57Z","lastTransitionTime":"2026-01-22T14:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.028708 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.028759 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.028772 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.028793 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.028805 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:58Z","lastTransitionTime":"2026-01-22T14:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.131126 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.131164 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.131174 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.131188 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.131198 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:58Z","lastTransitionTime":"2026-01-22T14:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.233677 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.233727 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.233740 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.233755 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.233765 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:58Z","lastTransitionTime":"2026-01-22T14:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.241818 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 13:31:36.402480652 +0000 UTC Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.257136 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:03:58 crc kubenswrapper[5017]: E0122 14:03:58.257254 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.336393 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.336469 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.336482 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.336522 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.336536 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:58Z","lastTransitionTime":"2026-01-22T14:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.439076 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.439142 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.439154 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.439171 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.439181 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:58Z","lastTransitionTime":"2026-01-22T14:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.542276 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.542309 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.542320 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.542339 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.542350 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:58Z","lastTransitionTime":"2026-01-22T14:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.645037 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.645157 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.645179 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.645207 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.645225 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:58Z","lastTransitionTime":"2026-01-22T14:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.747011 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.747059 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.747070 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.747088 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.747099 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:58Z","lastTransitionTime":"2026-01-22T14:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.850559 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.850609 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.850632 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.850676 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.850698 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:58Z","lastTransitionTime":"2026-01-22T14:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.954296 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.954369 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.954391 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.954419 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:58 crc kubenswrapper[5017]: I0122 14:03:58.954439 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:58Z","lastTransitionTime":"2026-01-22T14:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.057241 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.057308 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.057321 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.057334 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.057343 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:59Z","lastTransitionTime":"2026-01-22T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.124047 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.124091 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.124101 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.124138 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.124149 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:59Z","lastTransitionTime":"2026-01-22T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:59 crc kubenswrapper[5017]: E0122 14:03:59.138671 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:59Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.143506 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.143559 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.143573 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.143591 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.143604 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:59Z","lastTransitionTime":"2026-01-22T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:59 crc kubenswrapper[5017]: E0122 14:03:59.159510 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:59Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.163823 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.163859 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.163869 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.163884 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.163900 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:59Z","lastTransitionTime":"2026-01-22T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:59 crc kubenswrapper[5017]: E0122 14:03:59.181380 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:59Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.187182 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.187268 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.187281 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.187303 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.187320 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:59Z","lastTransitionTime":"2026-01-22T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:59 crc kubenswrapper[5017]: E0122 14:03:59.207090 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:59Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.212951 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.213008 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.213023 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.213046 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.213061 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:59Z","lastTransitionTime":"2026-01-22T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:59 crc kubenswrapper[5017]: E0122 14:03:59.228550 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:03:59Z is after 2025-08-24T17:21:41Z" Jan 22 14:03:59 crc kubenswrapper[5017]: E0122 14:03:59.228803 5017 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.230701 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.230759 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.230777 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.230804 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.230825 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:59Z","lastTransitionTime":"2026-01-22T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.242217 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 09:15:06.00214498 +0000 UTC Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.257616 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.257665 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.257665 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:03:59 crc kubenswrapper[5017]: E0122 14:03:59.257789 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:03:59 crc kubenswrapper[5017]: E0122 14:03:59.257949 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:03:59 crc kubenswrapper[5017]: E0122 14:03:59.258152 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.334074 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.334161 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.334177 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.334201 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.334216 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:59Z","lastTransitionTime":"2026-01-22T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.437438 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.437498 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.437509 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.437531 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.437543 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:59Z","lastTransitionTime":"2026-01-22T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.540243 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.540286 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.540325 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.540342 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.540362 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:59Z","lastTransitionTime":"2026-01-22T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.642931 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.642998 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.643010 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.643043 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.643058 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:59Z","lastTransitionTime":"2026-01-22T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.745422 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.745463 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.745472 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.745486 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.745496 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:59Z","lastTransitionTime":"2026-01-22T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.848239 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.848279 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.848287 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.848300 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.848309 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:59Z","lastTransitionTime":"2026-01-22T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.950775 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.950811 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.950819 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.950832 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:03:59 crc kubenswrapper[5017]: I0122 14:03:59.950841 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:03:59Z","lastTransitionTime":"2026-01-22T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.054879 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.054940 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.054958 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.054989 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.055007 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:00Z","lastTransitionTime":"2026-01-22T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.157883 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.157922 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.157932 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.157954 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.157963 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:00Z","lastTransitionTime":"2026-01-22T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.243410 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 19:04:40.257136971 +0000 UTC Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.256656 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:00 crc kubenswrapper[5017]: E0122 14:04:00.256795 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.260549 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.260609 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.260622 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.260643 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.260654 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:00Z","lastTransitionTime":"2026-01-22T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.362652 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.362707 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.362716 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.362732 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.362744 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:00Z","lastTransitionTime":"2026-01-22T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.464945 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.464991 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.465004 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.465023 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.465035 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:00Z","lastTransitionTime":"2026-01-22T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.567670 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.567707 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.567719 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.567734 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.567746 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:00Z","lastTransitionTime":"2026-01-22T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.670052 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.670082 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.670092 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.670121 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.670132 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:00Z","lastTransitionTime":"2026-01-22T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.772337 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.772365 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.772376 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.772389 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.772397 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:00Z","lastTransitionTime":"2026-01-22T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.874584 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.874639 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.874649 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.874670 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.874686 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:00Z","lastTransitionTime":"2026-01-22T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.976861 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.976899 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.976908 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.976925 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:00 crc kubenswrapper[5017]: I0122 14:04:00.976934 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:00Z","lastTransitionTime":"2026-01-22T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.079258 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.079294 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.079305 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.079320 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.079331 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:01Z","lastTransitionTime":"2026-01-22T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.181264 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.181550 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.181613 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.181686 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.181757 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:01Z","lastTransitionTime":"2026-01-22T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.244161 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 18:50:19.366667841 +0000 UTC Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.257650 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.257742 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.257650 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:01 crc kubenswrapper[5017]: E0122 14:04:01.257925 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:01 crc kubenswrapper[5017]: E0122 14:04:01.258057 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:01 crc kubenswrapper[5017]: E0122 14:04:01.258200 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.284663 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.284706 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.284715 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.284730 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.284739 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:01Z","lastTransitionTime":"2026-01-22T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.387286 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.387363 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.387373 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.387390 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.387405 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:01Z","lastTransitionTime":"2026-01-22T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.490232 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.490279 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.490290 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.490308 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.490319 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:01Z","lastTransitionTime":"2026-01-22T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.592569 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.592848 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.592859 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.592874 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.592885 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:01Z","lastTransitionTime":"2026-01-22T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.695731 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.695805 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.695820 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.695842 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.695857 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:01Z","lastTransitionTime":"2026-01-22T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.798542 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.798593 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.798602 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.798616 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.798625 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:01Z","lastTransitionTime":"2026-01-22T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.906857 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.906908 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.906922 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.906942 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:01 crc kubenswrapper[5017]: I0122 14:04:01.906958 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:01Z","lastTransitionTime":"2026-01-22T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.009918 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.009968 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.009980 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.010000 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.010015 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:02Z","lastTransitionTime":"2026-01-22T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.111939 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.111978 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.111986 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.112002 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.112014 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:02Z","lastTransitionTime":"2026-01-22T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.215225 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.215276 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.215286 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.215301 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.215310 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:02Z","lastTransitionTime":"2026-01-22T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.245134 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 20:31:02.222632905 +0000 UTC Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.257446 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:02 crc kubenswrapper[5017]: E0122 14:04:02.257572 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.318149 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.318179 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.318187 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.318201 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.318210 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:02Z","lastTransitionTime":"2026-01-22T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.421207 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.421251 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.421261 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.421281 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.421290 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:02Z","lastTransitionTime":"2026-01-22T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.523894 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.523936 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.523947 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.523964 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.523975 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:02Z","lastTransitionTime":"2026-01-22T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.627342 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.627398 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.627421 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.627448 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.627465 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:02Z","lastTransitionTime":"2026-01-22T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.730161 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.730207 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.730224 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.730241 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.730262 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:02Z","lastTransitionTime":"2026-01-22T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.831818 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.831868 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.831880 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.831897 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.831910 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:02Z","lastTransitionTime":"2026-01-22T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.934777 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.934843 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.934865 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.934893 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:02 crc kubenswrapper[5017]: I0122 14:04:02.934918 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:02Z","lastTransitionTime":"2026-01-22T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.037703 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.037780 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.037791 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.037810 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.037822 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:03Z","lastTransitionTime":"2026-01-22T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.140299 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.140338 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.140346 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.140361 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.140370 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:03Z","lastTransitionTime":"2026-01-22T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.243781 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.243873 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.243899 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.243932 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.243956 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:03Z","lastTransitionTime":"2026-01-22T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.245949 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 14:26:10.68269273 +0000 UTC Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.257442 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.257442 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.257587 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:03 crc kubenswrapper[5017]: E0122 14:04:03.257784 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:03 crc kubenswrapper[5017]: E0122 14:04:03.257880 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:03 crc kubenswrapper[5017]: E0122 14:04:03.257989 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.346546 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.346613 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.346631 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.346660 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.346677 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:03Z","lastTransitionTime":"2026-01-22T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.449523 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.449581 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.449598 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.449625 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.449647 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:03Z","lastTransitionTime":"2026-01-22T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.552648 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.552695 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.552707 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.552724 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.552733 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:03Z","lastTransitionTime":"2026-01-22T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.655277 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.655334 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.655346 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.655360 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.655369 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:03Z","lastTransitionTime":"2026-01-22T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.758273 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.758336 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.758353 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.758378 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.758401 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:03Z","lastTransitionTime":"2026-01-22T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.860031 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.860069 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.860080 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.860092 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.860107 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:03Z","lastTransitionTime":"2026-01-22T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.962171 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.962208 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.962219 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.962235 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:03 crc kubenswrapper[5017]: I0122 14:04:03.962246 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:03Z","lastTransitionTime":"2026-01-22T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.065724 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.065800 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.065822 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.065850 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.065875 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:04Z","lastTransitionTime":"2026-01-22T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.168772 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.168815 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.168824 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.168840 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.168853 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:04Z","lastTransitionTime":"2026-01-22T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.246510 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 13:25:49.965270235 +0000 UTC Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.256790 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:04 crc kubenswrapper[5017]: E0122 14:04:04.257261 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.257755 5017 scope.go:117] "RemoveContainer" containerID="1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3" Jan 22 14:04:04 crc kubenswrapper[5017]: E0122 14:04:04.258056 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-vgtf7_openshift-ovn-kubernetes(43c64840-5206-4dec-834e-77e0562f19de)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" podUID="43c64840-5206-4dec-834e-77e0562f19de" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.271822 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.271873 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.271883 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.271904 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.271918 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:04Z","lastTransitionTime":"2026-01-22T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.273066 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eddbc32-157f-423c-9d75-9c93c5e988e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64ce1b2fd6d34be7d6a188ba0958773d3e5c61f1d49aeeaf74b8b25be9246f39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec1dbc41418ac75e7457e9f7c96ccae5a4f41000133314ec19b09377870b3c1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://821419c228395ceab721be9e601aeffd3f5e24dce0e095873a2ec4b820b5ac29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:04Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.287098 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\
"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:04Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.300501 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:04Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.313830 5017 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-gc29v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bca74e01-1d78-4eeb-b3db-842fda0babd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gc29v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:04Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.334948 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655
ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:04Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.349982 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:04Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.365584 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:04Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.374867 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.374903 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.374915 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.374932 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.374944 5017 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:04Z","lastTransitionTime":"2026-01-22T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.378912 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:04Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.389533 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:04Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.402865 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:04Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.416138 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:04Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.429268 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:04Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.441680 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:04Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.452083 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:04Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.469590 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:50Z\\\",\\\"message\\\":\\\"_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0122 14:03:49.845153 6671 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 14:03:49.844840 6671 services_controller.go:454] Service openshift-marketplace/marketplace-operator-metrics for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0122 14:03:49.845218 6671 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network 
co\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-vgtf7_openshift-ovn-kubernetes(43c64840-5206-4dec-834e-77e0562f19de)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:04Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.477256 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.477327 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.477343 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.477360 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.477370 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:04Z","lastTransitionTime":"2026-01-22T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.482196 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f96871c-5cdd-496a-9c9b-b669151acffd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44afb8e08394c51727ad8de33a09f37a7dfef401310befe3d65553362af85562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b45106549b9b14cf3935ae87a553a327fc77b924bd793b38a90d7b6ce9725f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvbsd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:04Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.499664 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:04Z is after 
2025-08-24T17:21:41Z" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.519821 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\
\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:04Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.580708 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.580762 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.580777 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.580800 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.580816 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:04Z","lastTransitionTime":"2026-01-22T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.683493 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.683524 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.683534 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.683550 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.683558 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:04Z","lastTransitionTime":"2026-01-22T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.787081 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.787132 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.787145 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.787161 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.787173 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:04Z","lastTransitionTime":"2026-01-22T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.888844 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.888918 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.888933 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.888972 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.888992 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:04Z","lastTransitionTime":"2026-01-22T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.992212 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.992304 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.992324 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.992371 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:04 crc kubenswrapper[5017]: I0122 14:04:04.992411 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:04Z","lastTransitionTime":"2026-01-22T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.094436 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.094470 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.094480 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.094492 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.094501 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:05Z","lastTransitionTime":"2026-01-22T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.196948 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.196987 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.197016 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.197032 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.197042 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:05Z","lastTransitionTime":"2026-01-22T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.247487 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 18:30:43.629259695 +0000 UTC Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.257064 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:05 crc kubenswrapper[5017]: E0122 14:04:05.257305 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.257356 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.257356 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:05 crc kubenswrapper[5017]: E0122 14:04:05.257569 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:05 crc kubenswrapper[5017]: E0122 14:04:05.257662 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.271857 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f96871c-5cdd-496a-9c9b-b669151acffd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44afb8e08394c51727ad8de33a09f37a7dfef401310befe3d65553362af85562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b45106549b9b14cf3935ae87a553a327fc77b924bd793b38a90d7b6ce9725f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:38Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvbsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:05Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.291055 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastStat
e\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:05Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.298821 5017 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.298853 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.298864 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.298881 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.298893 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:05Z","lastTransitionTime":"2026-01-22T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.305407 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:05Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.317099 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:05Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.328086 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:05Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.337066 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:05Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.355707 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:50Z\\\",\\\"message\\\":\\\"_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0122 14:03:49.845153 6671 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 14:03:49.844840 6671 services_controller.go:454] Service openshift-marketplace/marketplace-operator-metrics for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0122 14:03:49.845218 6671 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network 
co\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-vgtf7_openshift-ovn-kubernetes(43c64840-5206-4dec-834e-77e0562f19de)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:05Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.367839 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:05Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.380660 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:05Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.391100 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eddbc32-157f-423c-9d75-9c93c5e988e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64ce1b2fd6d34be7d6a188ba0958773d3e5c61f1d49aeeaf74b8b25be9246f39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec1dbc41418ac75e7457e9f7c96ccae5a4f41000133314ec19b09377870b3c1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://821419c228395ceab721be9e601aeffd3f5e24dce0e095873a2ec4b820b5ac29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:05Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.401102 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.401157 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.401169 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.401186 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.401221 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:05Z","lastTransitionTime":"2026-01-22T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.406279 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:05Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.416854 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:05Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.426523 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gc29v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bca74e01-1d78-4eeb-b3db-842fda0babd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gc29v\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:05Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.444570 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc
2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\
\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:05Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.456050 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:05Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.467057 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:05Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.477165 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:05Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.486592 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:05Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.510597 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.510640 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.510651 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.510667 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.510678 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:05Z","lastTransitionTime":"2026-01-22T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.612808 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.612849 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.612858 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.612872 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.612882 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:05Z","lastTransitionTime":"2026-01-22T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.715261 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.715317 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.715336 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.715354 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.715370 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:05Z","lastTransitionTime":"2026-01-22T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.944823 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.944883 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.944897 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.944920 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:05 crc kubenswrapper[5017]: I0122 14:04:05.944935 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:05Z","lastTransitionTime":"2026-01-22T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.048027 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.048063 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.048071 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.048084 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.048093 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:06Z","lastTransitionTime":"2026-01-22T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.150312 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.150345 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.150355 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.150368 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.150377 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:06Z","lastTransitionTime":"2026-01-22T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.248643 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 14:07:07.1868688 +0000 UTC Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.252464 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.252504 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.252515 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.252534 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.252549 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:06Z","lastTransitionTime":"2026-01-22T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.256642 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:06 crc kubenswrapper[5017]: E0122 14:04:06.256742 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.355768 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.355821 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.355835 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.355859 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.355873 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:06Z","lastTransitionTime":"2026-01-22T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.459007 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.459053 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.459063 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.459079 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.459093 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:06Z","lastTransitionTime":"2026-01-22T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.561735 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.561779 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.561795 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.561814 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.561826 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:06Z","lastTransitionTime":"2026-01-22T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.664755 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.664800 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.664809 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.664837 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.664847 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:06Z","lastTransitionTime":"2026-01-22T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.766997 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.767060 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.767071 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.767088 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.767101 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:06Z","lastTransitionTime":"2026-01-22T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.869817 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.869873 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.869890 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.869912 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.869924 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:06Z","lastTransitionTime":"2026-01-22T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.971614 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.971654 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.971665 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.971680 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:06 crc kubenswrapper[5017]: I0122 14:04:06.971689 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:06Z","lastTransitionTime":"2026-01-22T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.074051 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.074154 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.074168 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.074182 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.074192 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:07Z","lastTransitionTime":"2026-01-22T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.175904 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.175938 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.175946 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.175959 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.175967 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:07Z","lastTransitionTime":"2026-01-22T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.249720 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 15:13:52.907351437 +0000 UTC Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.259469 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:07 crc kubenswrapper[5017]: E0122 14:04:07.259570 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.259695 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.259776 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:07 crc kubenswrapper[5017]: E0122 14:04:07.259854 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:07 crc kubenswrapper[5017]: E0122 14:04:07.259960 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.278416 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.278461 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.278471 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.278487 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.278496 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:07Z","lastTransitionTime":"2026-01-22T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.381096 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.381179 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.381188 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.381203 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.381211 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:07Z","lastTransitionTime":"2026-01-22T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.483977 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.484012 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.484020 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.484032 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.484041 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:07Z","lastTransitionTime":"2026-01-22T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.586363 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.586447 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.586460 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.586476 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.586490 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:07Z","lastTransitionTime":"2026-01-22T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.688923 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.688964 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.688978 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.688994 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.689003 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:07Z","lastTransitionTime":"2026-01-22T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.792127 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.792184 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.792195 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.792212 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.792224 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:07Z","lastTransitionTime":"2026-01-22T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.894693 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.894739 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.894753 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.894815 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.894829 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:07Z","lastTransitionTime":"2026-01-22T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.997535 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.997624 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.997637 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.997855 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:07 crc kubenswrapper[5017]: I0122 14:04:07.997880 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:07Z","lastTransitionTime":"2026-01-22T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.100225 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.100262 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.100270 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.100284 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.100294 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:08Z","lastTransitionTime":"2026-01-22T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.202392 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.202446 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.202455 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.202472 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.202481 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:08Z","lastTransitionTime":"2026-01-22T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.250751 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 21:22:27.150674523 +0000 UTC Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.257184 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:08 crc kubenswrapper[5017]: E0122 14:04:08.257340 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.304595 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.304626 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.304634 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.304652 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.304661 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:08Z","lastTransitionTime":"2026-01-22T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.406580 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.406626 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.406637 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.406653 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.406664 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:08Z","lastTransitionTime":"2026-01-22T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.509958 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.510019 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.510038 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.510052 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.510062 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:08Z","lastTransitionTime":"2026-01-22T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.612248 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.612294 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.612306 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.612323 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.612336 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:08Z","lastTransitionTime":"2026-01-22T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.715289 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.715336 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.715350 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.715368 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.715381 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:08Z","lastTransitionTime":"2026-01-22T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.817744 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.817803 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.817814 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.817828 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.817838 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:08Z","lastTransitionTime":"2026-01-22T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.920339 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.920380 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.920389 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.920404 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:08 crc kubenswrapper[5017]: I0122 14:04:08.920413 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:08Z","lastTransitionTime":"2026-01-22T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.023094 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.023156 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.023169 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.023184 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.023197 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:09Z","lastTransitionTime":"2026-01-22T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.125694 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.125746 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.125758 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.125777 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.125789 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:09Z","lastTransitionTime":"2026-01-22T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.229372 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.229442 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.229456 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.229476 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.229495 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:09Z","lastTransitionTime":"2026-01-22T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.251482 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 05:47:20.614093699 +0000 UTC Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.256768 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.256773 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:09 crc kubenswrapper[5017]: E0122 14:04:09.256932 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.256779 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:09 crc kubenswrapper[5017]: E0122 14:04:09.257018 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:09 crc kubenswrapper[5017]: E0122 14:04:09.257183 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.332255 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.332288 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.332299 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.332313 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.332323 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:09Z","lastTransitionTime":"2026-01-22T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.434980 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.435023 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.435033 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.435047 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.435057 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:09Z","lastTransitionTime":"2026-01-22T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.537188 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.537226 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.537235 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.537249 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.537257 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:09Z","lastTransitionTime":"2026-01-22T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.569490 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.569541 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.569552 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.569570 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.569582 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:09Z","lastTransitionTime":"2026-01-22T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:09 crc kubenswrapper[5017]: E0122 14:04:09.583639 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:09Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.587838 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.587879 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.587889 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.587903 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.587912 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:09Z","lastTransitionTime":"2026-01-22T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:09 crc kubenswrapper[5017]: E0122 14:04:09.599389 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:09Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.602909 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.602958 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.602976 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.602997 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.603009 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:09Z","lastTransitionTime":"2026-01-22T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:09 crc kubenswrapper[5017]: E0122 14:04:09.615612 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:09Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.618595 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.618655 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.618673 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.618699 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.618714 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:09Z","lastTransitionTime":"2026-01-22T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:09 crc kubenswrapper[5017]: E0122 14:04:09.630683 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:09Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.635425 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.635469 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.635478 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.635494 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.635505 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:09Z","lastTransitionTime":"2026-01-22T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:09 crc kubenswrapper[5017]: E0122 14:04:09.653657 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:09Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:09 crc kubenswrapper[5017]: E0122 14:04:09.653779 5017 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.654753 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.654781 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.654792 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.654806 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.654814 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:09Z","lastTransitionTime":"2026-01-22T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.757306 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.757355 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.757369 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.757388 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.757400 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:09Z","lastTransitionTime":"2026-01-22T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.859169 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.859215 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.859227 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.859244 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.859260 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:09Z","lastTransitionTime":"2026-01-22T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.960920 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.960962 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.960973 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.960989 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:09 crc kubenswrapper[5017]: I0122 14:04:09.961000 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:09Z","lastTransitionTime":"2026-01-22T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.063025 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.063077 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.063093 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.063130 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.063142 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:10Z","lastTransitionTime":"2026-01-22T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.164817 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.164902 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.164920 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.164938 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.164953 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:10Z","lastTransitionTime":"2026-01-22T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.253173 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 04:47:57.390388188 +0000 UTC Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.257453 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:10 crc kubenswrapper[5017]: E0122 14:04:10.257568 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.266654 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.266679 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.266689 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.266703 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.266714 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:10Z","lastTransitionTime":"2026-01-22T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.369040 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.369098 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.369179 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.369201 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.369217 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:10Z","lastTransitionTime":"2026-01-22T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.471202 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.471238 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.471248 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.471263 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.471272 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:10Z","lastTransitionTime":"2026-01-22T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.573862 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.573911 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.573924 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.573944 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.573956 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:10Z","lastTransitionTime":"2026-01-22T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.676039 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.676071 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.676082 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.676098 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.676148 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:10Z","lastTransitionTime":"2026-01-22T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.778728 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.778765 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.779168 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.779204 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.779215 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:10Z","lastTransitionTime":"2026-01-22T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.881911 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.881947 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.881960 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.881975 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.881983 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:10Z","lastTransitionTime":"2026-01-22T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.984736 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.984792 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.984809 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.984826 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:10 crc kubenswrapper[5017]: I0122 14:04:10.984837 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:10Z","lastTransitionTime":"2026-01-22T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.087431 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.087488 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.087501 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.087518 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.087529 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:11Z","lastTransitionTime":"2026-01-22T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.189787 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.189829 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.189839 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.189853 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.189861 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:11Z","lastTransitionTime":"2026-01-22T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.253803 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 17:40:54.337966471 +0000 UTC Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.257144 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.257169 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.257169 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:11 crc kubenswrapper[5017]: E0122 14:04:11.257316 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:11 crc kubenswrapper[5017]: E0122 14:04:11.257410 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:11 crc kubenswrapper[5017]: E0122 14:04:11.257712 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.292505 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.292578 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.292590 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.292607 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.292630 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:11Z","lastTransitionTime":"2026-01-22T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.395537 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.395585 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.395596 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.395614 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.395629 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:11Z","lastTransitionTime":"2026-01-22T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.497673 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.497884 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.497926 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.497948 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.497966 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:11Z","lastTransitionTime":"2026-01-22T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.600317 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.600369 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.600379 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.600394 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.600406 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:11Z","lastTransitionTime":"2026-01-22T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.702696 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.702753 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.702765 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.702783 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.702798 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:11Z","lastTransitionTime":"2026-01-22T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.800421 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs\") pod \"network-metrics-daemon-gc29v\" (UID: \"bca74e01-1d78-4eeb-b3db-842fda0babd7\") " pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:11 crc kubenswrapper[5017]: E0122 14:04:11.800596 5017 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:04:11 crc kubenswrapper[5017]: E0122 14:04:11.800674 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs podName:bca74e01-1d78-4eeb-b3db-842fda0babd7 nodeName:}" failed. No retries permitted until 2026-01-22 14:04:43.800657306 +0000 UTC m=+98.793119164 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs") pod "network-metrics-daemon-gc29v" (UID: "bca74e01-1d78-4eeb-b3db-842fda0babd7") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.804287 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.804374 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.804385 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.804405 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.804413 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:11Z","lastTransitionTime":"2026-01-22T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.906803 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.906838 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.906847 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.906859 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:11 crc kubenswrapper[5017]: I0122 14:04:11.906868 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:11Z","lastTransitionTime":"2026-01-22T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.008597 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.008629 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.008692 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.008705 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.008715 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:12Z","lastTransitionTime":"2026-01-22T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.111256 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.111289 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.111297 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.111310 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.111320 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:12Z","lastTransitionTime":"2026-01-22T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.213747 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.213779 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.213791 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.213807 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.213821 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:12Z","lastTransitionTime":"2026-01-22T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.254600 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 00:15:21.606584698 +0000 UTC Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.256935 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:12 crc kubenswrapper[5017]: E0122 14:04:12.257068 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.315641 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.315679 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.315689 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.315703 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.315711 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:12Z","lastTransitionTime":"2026-01-22T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.419225 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.419254 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.419262 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.419274 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.419283 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:12Z","lastTransitionTime":"2026-01-22T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.521641 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.521723 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.521739 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.521761 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.521777 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:12Z","lastTransitionTime":"2026-01-22T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.625018 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.625081 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.625094 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.625131 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.625146 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:12Z","lastTransitionTime":"2026-01-22T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.727965 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.728018 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.728038 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.728063 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.728081 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:12Z","lastTransitionTime":"2026-01-22T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.831060 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.831099 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.831140 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.831156 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.831167 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:12Z","lastTransitionTime":"2026-01-22T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.934016 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.934068 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.934083 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.934129 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:12 crc kubenswrapper[5017]: I0122 14:04:12.934144 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:12Z","lastTransitionTime":"2026-01-22T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.036087 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.036167 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.036185 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.036205 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.036221 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:13Z","lastTransitionTime":"2026-01-22T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.138371 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.138407 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.138415 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.138429 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.138438 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:13Z","lastTransitionTime":"2026-01-22T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.239916 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.239969 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.239980 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.239994 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.240002 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:13Z","lastTransitionTime":"2026-01-22T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.255624 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 06:58:57.657108434 +0000 UTC Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.256885 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.257011 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:13 crc kubenswrapper[5017]: E0122 14:04:13.257019 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.256884 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:13 crc kubenswrapper[5017]: E0122 14:04:13.257254 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:13 crc kubenswrapper[5017]: E0122 14:04:13.257387 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.342346 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.342385 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.342397 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.342430 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.342441 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:13Z","lastTransitionTime":"2026-01-22T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.445342 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.445376 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.445384 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.445398 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.445406 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:13Z","lastTransitionTime":"2026-01-22T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.547518 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.547557 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.547570 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.547590 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.547603 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:13Z","lastTransitionTime":"2026-01-22T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.650213 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.650244 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.650251 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.650263 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.650272 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:13Z","lastTransitionTime":"2026-01-22T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.752515 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.752551 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.752564 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.752579 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.752590 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:13Z","lastTransitionTime":"2026-01-22T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.855622 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.855658 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.855669 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.855686 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.855695 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:13Z","lastTransitionTime":"2026-01-22T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.958674 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.958699 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.958708 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.958722 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.958730 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:13Z","lastTransitionTime":"2026-01-22T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.970050 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j2qmt_c223e7fe-8360-4731-a32d-3cf60ed7324b/kube-multus/0.log" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.970105 5017 generic.go:334] "Generic (PLEG): container finished" podID="c223e7fe-8360-4731-a32d-3cf60ed7324b" containerID="ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd" exitCode=1 Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.970170 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j2qmt" event={"ID":"c223e7fe-8360-4731-a32d-3cf60ed7324b","Type":"ContainerDied","Data":"ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd"} Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.970581 5017 scope.go:117] "RemoveContainer" containerID="ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.983576 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:13Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:13 crc kubenswrapper[5017]: I0122 14:04:13.993990 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:13Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.013813 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655
ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:14Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.027254 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:14Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.041790 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:14Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.058732 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:14Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.061904 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.061945 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.061953 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.061969 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.061977 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:14Z","lastTransitionTime":"2026-01-22T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.072506 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:14Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.090496 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:50Z\\\",\\\"message\\\":\\\"_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0122 14:03:49.845153 6671 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 14:03:49.844840 6671 services_controller.go:454] Service openshift-marketplace/marketplace-operator-metrics for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0122 14:03:49.845218 6671 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network co\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vgtf7_openshift-ovn-kubernetes(43c64840-5206-4dec-834e-77e0562f19de)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:14Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.102737 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f96871c-5cdd-496a-9c9b-b669151acffd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44afb8e08394c51727ad8de33a09f37a7dfef401310befe3d65553362af85562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b45106549b9b14cf3935ae87a553a327fc77b924bd793b38a90d7b6ce9725f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvbsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:14Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.118066 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:14Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.131712 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:14Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.143816 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:14Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.156289 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:14Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.164720 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.164761 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.164772 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.164789 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.164802 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:14Z","lastTransitionTime":"2026-01-22T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.171932 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:14Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.181793 5017 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:14Z is after 2025-08-24T17:21:41Z" Jan 22 
14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.191776 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gc29v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bca74e01-1d78-4eeb-b3db-842fda0babd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gc29v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:14Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.203311 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eddbc32-157f-423c-9d75-9c93c5e988e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64ce1b2fd6d34be7d6a188ba0958773d3e5c61f1d49aeeaf74b8b25be9246f39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec1dbc41418ac75e7457e9f7c96ccae5a4f41000133314ec19b09377870b3c1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://821419c228395ceab721be9e601aeffd3f5e24dce0e095873a2ec4b820b5ac29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:14Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.214504 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:04:13Z\\\",\\\"message\\\":\\\"2026-01-22T14:03:28+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8da2e187-a458-42a7-973c-69aea8e9e634\\\\n2026-01-22T14:03:28+00:00 
[cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8da2e187-a458-42a7-973c-69aea8e9e634 to /host/opt/cni/bin/\\\\n2026-01-22T14:03:28Z [verbose] multus-daemon started\\\\n2026-01-22T14:03:28Z [verbose] Readiness Indicator file check\\\\n2026-01-22T14:04:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:14Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.256165 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 02:50:41.703298901 +0000 UTC Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.257399 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:14 crc kubenswrapper[5017]: E0122 14:04:14.257559 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.267428 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.267492 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.267503 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.267518 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.267526 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:14Z","lastTransitionTime":"2026-01-22T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.370725 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.370771 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.370783 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.370801 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.370813 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:14Z","lastTransitionTime":"2026-01-22T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.472979 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.473015 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.473024 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.473039 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.473056 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:14Z","lastTransitionTime":"2026-01-22T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.575594 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.575829 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.575928 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.576001 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.576063 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:14Z","lastTransitionTime":"2026-01-22T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.678688 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.679015 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.679134 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.679235 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.679324 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:14Z","lastTransitionTime":"2026-01-22T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.781101 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.781370 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.781481 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.781556 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.781636 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:14Z","lastTransitionTime":"2026-01-22T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.884375 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.884416 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.884426 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.884443 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.884453 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:14Z","lastTransitionTime":"2026-01-22T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.974671 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j2qmt_c223e7fe-8360-4731-a32d-3cf60ed7324b/kube-multus/0.log" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.974983 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j2qmt" event={"ID":"c223e7fe-8360-4731-a32d-3cf60ed7324b","Type":"ContainerStarted","Data":"5f97833420fc482dc5fa1d748caa12701d129b5081056d0a7f8c40050c64cf2f"} Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.986618 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.986673 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.986681 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.986694 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.986702 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:14Z","lastTransitionTime":"2026-01-22T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:14 crc kubenswrapper[5017]: I0122 14:04:14.989812 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:14Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.002487 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.012310 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.032369 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:50Z\\\",\\\"message\\\":\\\"_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0122 14:03:49.845153 6671 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 14:03:49.844840 6671 services_controller.go:454] Service openshift-marketplace/marketplace-operator-metrics for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0122 14:03:49.845218 6671 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network 
co\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-vgtf7_openshift-ovn-kubernetes(43c64840-5206-4dec-834e-77e0562f19de)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.043735 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f96871c-5cdd-496a-9c9b-b669151acffd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44afb8e08394c51727ad8de33a09f37a7dfef401310befe3d65553362af85562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b45106549b9b14cf3935ae87a553a327fc77b924bd793b38a90d7b6ce9725f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvbsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 
14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.055652 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.067620 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.081374 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.089492 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.089536 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.089550 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.089568 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.089581 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:15Z","lastTransitionTime":"2026-01-22T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.094884 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.112406 5017 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f97833420fc482dc5fa1d748caa12701d129b5081056d0a7f8c40050c64cf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:04:13Z\\\",\\\"message\\\":\\\"2026-01-22T14:03:28+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8da2e187-a458-42a7-973c-69aea8e9e634\\\\n2026-01-22T14:03:28+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8da2e187-a458-42a7-973c-69aea8e9e634 to /host/opt/cni/bin/\\\\n2026-01-22T14:03:28Z [verbose] multus-daemon started\\\\n2026-01-22T14:03:28Z [verbose] Readiness Indicator file check\\\\n2026-01-22T14:04:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:04:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.123247 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.135714 5017 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-gc29v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bca74e01-1d78-4eeb-b3db-842fda0babd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gc29v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.149338 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eddbc32-157f-423c-9d75-9c93c5e988e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64ce1b2fd6d34be7d6a188ba0958773d3e5c61f1d49aeeaf74b8b25be9246f39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec1dbc41418ac75e7457e9f7c96ccae5a4f41000133314ec19b09377870b3c1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://821419c228395ceab721be9e601aeffd3f5e24dce0e095873a2ec4b820b5ac29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.160886 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.171699 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.180605 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.191885 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.191933 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.191945 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.191962 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.191974 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:15Z","lastTransitionTime":"2026-01-22T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.212218 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.232310 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.256650 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 19:38:34.513841345 +0000 UTC Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.256795 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.256853 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:15 crc kubenswrapper[5017]: E0122 14:04:15.256889 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:15 crc kubenswrapper[5017]: E0122 14:04:15.256994 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.257036 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:15 crc kubenswrapper[5017]: E0122 14:04:15.257200 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.273977 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eddbc32-157f-423c-9d75-9c93c5e988e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64ce1b2fd6d34be7d6a188ba0958773d3e5c61f1d49aeeaf74b8b25be9246f39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec1dbc41418ac75e7457e9f7c96ccae5a4f41000133314ec19b09377870b3c1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf
86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://821419c228395ceab721be9e601aeffd3f5e24dce0e095873a2ec4b820b5ac29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.290049 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f97833420fc482dc5fa1d748caa12701d129b5081056d0a7f8c40050c64cf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:04:13Z\\\",\\\"message\\\":\\\"2026-01-22T14:03:28+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8da2e187-a458-42a7-973c-69aea8e9e634\\\\n2026-01-22T14:03:28+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8da2e187-a458-42a7-973c-69aea8e9e634 to /host/opt/cni/bin/\\\\n2026-01-22T14:03:28Z [verbose] multus-daemon started\\\\n2026-01-22T14:03:28Z [verbose] Readiness Indicator file check\\\\n2026-01-22T14:04:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:04:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.293841 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.293874 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.293884 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.293902 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.293912 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:15Z","lastTransitionTime":"2026-01-22T14:04:15Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.306914 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.320373 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gc29v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bca74e01-1d78-4eeb-b3db-842fda0babd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gc29v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.348712 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-
01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946
f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.365816 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.378214 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.389841 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.396835 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.396867 5017 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.396880 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.397315 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.397342 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:15Z","lastTransitionTime":"2026-01-22T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.404975 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current 
time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.419156 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mount
Path\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.431566 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.444421 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.458249 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.468555 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.490546 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:50Z\\\",\\\"message\\\":\\\"_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0122 14:03:49.845153 6671 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 14:03:49.844840 6671 services_controller.go:454] Service openshift-marketplace/marketplace-operator-metrics for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0122 14:03:49.845218 6671 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network 
co\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-vgtf7_openshift-ovn-kubernetes(43c64840-5206-4dec-834e-77e0562f19de)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.499259 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.499301 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.499311 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.499329 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.499340 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:15Z","lastTransitionTime":"2026-01-22T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.503068 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f96871c-5cdd-496a-9c9b-b669151acffd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44afb8e08394c51727ad8de33a09f37a7dfef401310befe3d65553362af85562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b45106549b9b14cf3935ae87a553a327fc77b924bd793b38a90d7b6ce9725f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvbsd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.516372 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 
2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.531923 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\
\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:15Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.601260 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.601307 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.601317 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.601352 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.601362 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:15Z","lastTransitionTime":"2026-01-22T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.703498 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.703536 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.703549 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.703564 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.703575 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:15Z","lastTransitionTime":"2026-01-22T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.806403 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.806446 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.806457 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.806475 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.806486 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:15Z","lastTransitionTime":"2026-01-22T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.908708 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.908748 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.908757 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.908771 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:15 crc kubenswrapper[5017]: I0122 14:04:15.908780 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:15Z","lastTransitionTime":"2026-01-22T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.011235 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.011283 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.011296 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.011313 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.011325 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:16Z","lastTransitionTime":"2026-01-22T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.113634 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.113678 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.113691 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.113708 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.113718 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:16Z","lastTransitionTime":"2026-01-22T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.216353 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.216427 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.216440 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.216459 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.216473 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:16Z","lastTransitionTime":"2026-01-22T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.257442 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 07:25:26.873037161 +0000 UTC Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.257546 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:16 crc kubenswrapper[5017]: E0122 14:04:16.257813 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.258781 5017 scope.go:117] "RemoveContainer" containerID="1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.318786 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.318831 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.318843 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.318859 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.318872 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:16Z","lastTransitionTime":"2026-01-22T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.420738 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.420768 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.420776 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.420789 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.420798 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:16Z","lastTransitionTime":"2026-01-22T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.530826 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.530861 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.530869 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.530882 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.530891 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:16Z","lastTransitionTime":"2026-01-22T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.634661 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.634701 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.634711 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.634725 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.634734 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:16Z","lastTransitionTime":"2026-01-22T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.737431 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.737478 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.737493 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.737515 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.737530 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:16Z","lastTransitionTime":"2026-01-22T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.840340 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.840378 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.840388 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.840406 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.840415 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:16Z","lastTransitionTime":"2026-01-22T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.943731 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.943781 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.943794 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.943813 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.943823 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:16Z","lastTransitionTime":"2026-01-22T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.982224 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vgtf7_43c64840-5206-4dec-834e-77e0562f19de/ovnkube-controller/2.log" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.984478 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerStarted","Data":"20495ad9747c4b88b78329f029365528560cc625df54d8291d6873b4afec6222"} Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.985058 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:04:16 crc kubenswrapper[5017]: I0122 14:04:16.995674 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:16Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 
14:04:17.014092 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/k
ubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\
\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.027802 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.039524 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.046746 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.046786 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.046796 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.046813 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.046822 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:17Z","lastTransitionTime":"2026-01-22T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.049128 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.058283 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.077049 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20495ad9747c4b88b78329f029365528560cc625df54d8291d6873b4afec6222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:50Z\\\",\\\"message\\\":\\\"_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0122 14:03:49.845153 6671 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 14:03:49.844840 6671 services_controller.go:454] Service openshift-marketplace/marketplace-operator-metrics for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0122 14:03:49.845218 6671 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network 
co\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.092259 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f96871c-5cdd-496a-9c9b-b669151acffd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44afb8e08394c51727ad8de33a09f37a7dfef401310befe3d65553362af85562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b45106549b9b14cf3935ae87a553a327fc77b924bd793b38a90d7b6ce9725f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvbsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:17Z is after 2025-08-24T17:21:41Z" Jan 22 
14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.106185 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.120973 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.132763 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.144244 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.148550 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.148584 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.148595 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.148610 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.148621 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:17Z","lastTransitionTime":"2026-01-22T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.155730 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.170050 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.182707 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gc29v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bca74e01-1d78-4eeb-b3db-842fda0babd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gc29v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.196605 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eddbc32-157f-423c-9d75-9c93c5e988e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64ce1b2fd6d34be7d6a188ba0958773d3e5c61f1d49aeeaf74b8b25be9246f39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec1dbc41418ac75e7457e9f7c96ccae5a4f41000133314ec19b09377870b3c1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://821419c228395ceab721be9e601aeffd3f5e24dce0e095873a2ec4b820b5ac29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.210724 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f97833420fc482dc5fa1d748caa12701d129b5081056d0a7f8c40050c64cf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:04:13Z\\\",\\\"message\\\":\\\"2026-01-22T14:03:28+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8da2e187-a458-42a7-973c-69aea8e9e634\\\\n2026-01-22T14:03:28+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8da2e187-a458-42a7-973c-69aea8e9e634 to /host/opt/cni/bin/\\\\n2026-01-22T14:03:28Z [verbose] multus-daemon started\\\\n2026-01-22T14:03:28Z [verbose] Readiness Indicator file check\\\\n2026-01-22T14:04:13Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:04:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.221426 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.250511 5017 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.250551 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.250560 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.250574 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.250584 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:17Z","lastTransitionTime":"2026-01-22T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.257572 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 23:53:03.947192438 +0000 UTC Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.257747 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.257778 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:17 crc kubenswrapper[5017]: E0122 14:04:17.257820 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.257779 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:17 crc kubenswrapper[5017]: E0122 14:04:17.257868 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:17 crc kubenswrapper[5017]: E0122 14:04:17.258110 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.352507 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.352546 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.352559 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.352573 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.352583 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:17Z","lastTransitionTime":"2026-01-22T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.454549 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.454590 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.454601 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.454617 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.454628 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:17Z","lastTransitionTime":"2026-01-22T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.557378 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.557423 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.557433 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.557449 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.557460 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:17Z","lastTransitionTime":"2026-01-22T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.660330 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.660370 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.660380 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.660398 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.660414 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:17Z","lastTransitionTime":"2026-01-22T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.763552 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.763584 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.763593 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.763606 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.763615 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:17Z","lastTransitionTime":"2026-01-22T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.866195 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.866250 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.866267 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.866286 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.866301 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:17Z","lastTransitionTime":"2026-01-22T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.968369 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.968408 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.968419 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.968442 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.968455 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:17Z","lastTransitionTime":"2026-01-22T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.992664 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vgtf7_43c64840-5206-4dec-834e-77e0562f19de/ovnkube-controller/3.log" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.993301 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vgtf7_43c64840-5206-4dec-834e-77e0562f19de/ovnkube-controller/2.log" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.996621 5017 generic.go:334] "Generic (PLEG): container finished" podID="43c64840-5206-4dec-834e-77e0562f19de" containerID="20495ad9747c4b88b78329f029365528560cc625df54d8291d6873b4afec6222" exitCode=1 Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.996699 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerDied","Data":"20495ad9747c4b88b78329f029365528560cc625df54d8291d6873b4afec6222"} Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.996881 5017 scope.go:117] "RemoveContainer" containerID="1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3" Jan 22 14:04:17 crc kubenswrapper[5017]: I0122 14:04:17.997905 5017 scope.go:117] "RemoveContainer" containerID="20495ad9747c4b88b78329f029365528560cc625df54d8291d6873b4afec6222" Jan 22 14:04:17 crc kubenswrapper[5017]: E0122 14:04:17.998227 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-vgtf7_openshift-ovn-kubernetes(43c64840-5206-4dec-834e-77e0562f19de)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" podUID="43c64840-5206-4dec-834e-77e0562f19de" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.014465 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.027211 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.040536 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.051001 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.071344 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.071384 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.071399 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.071416 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.071428 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:18Z","lastTransitionTime":"2026-01-22T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.071614 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20495ad9747c4b88b78329f029365528560cc625
df54d8291d6873b4afec6222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dfa66497be417f4ab79a5b95b3150a3859ef6b3c272a317360ef12a6da395b3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:03:50Z\\\",\\\"message\\\":\\\"_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0122 14:03:49.845153 6671 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 14:03:49.844840 6671 services_controller.go:454] Service openshift-marketplace/marketplace-operator-metrics for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0122 14:03:49.845218 6671 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network co\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20495ad9747c4b88b78329f029365528560cc625df54d8291d6873b4afec6222\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:04:17Z\\\",\\\"message\\\":\\\"vices.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"4607c9b7-15f9-4ba0-86e5-0021ba7e4488\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, 
Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.58\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF0122 14:04:17.012242 7056 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets
/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.083901 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f96871c-5cdd-496a-9c9b-b669151acffd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44afb8e08394c51727ad8de33a09f37a7dfef401310befe3d65553362af85562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b45106549b9b14cf3935ae87a553a327fc77b924bd793b38a90d7b6ce9725f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvbsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:18Z is after 2025-08-24T17:21:41Z" Jan 22 
14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.095739 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.110962 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.123639 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.135806 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f97833420fc482dc5fa1d748caa12701d129b5081056d0a7f8c40050c64cf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:04:13Z\\\",\\\"message\\\":\\\"2026-01-22T14:03:28+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8da2e187-a458-42a7-973c-69aea8e9e634\\\\n2026-01-22T14:03:28+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8da2e187-a458-42a7-973c-69aea8e9e634 to /host/opt/cni/bin/\\\\n2026-01-22T14:03:28Z [verbose] multus-daemon started\\\\n2026-01-22T14:03:28Z [verbose] Readiness Indicator file check\\\\n2026-01-22T14:04:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:04:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.145588 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.155651 5017 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-gc29v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bca74e01-1d78-4eeb-b3db-842fda0babd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gc29v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.164994 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eddbc32-157f-423c-9d75-9c93c5e988e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64ce1b2fd6d34be7d6a188ba0958773d3e5c61f1d49aeeaf74b8b25be9246f39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec1dbc41418ac75e7457e9f7c96ccae5a4f41000133314ec19b09377870b3c1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://821419c228395ceab721be9e601aeffd3f5e24dce0e095873a2ec4b820b5ac29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.173251 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.173289 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.173301 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.173316 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.173328 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:18Z","lastTransitionTime":"2026-01-22T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.177251 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.189869 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.201433 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.211278 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.232161 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655
ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.256763 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:18 crc kubenswrapper[5017]: E0122 14:04:18.256863 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.257813 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 06:35:19.089588319 +0000 UTC Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.275289 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.275323 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.275335 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.275351 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.275362 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:18Z","lastTransitionTime":"2026-01-22T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.378218 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.378278 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.378295 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.378318 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.378335 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:18Z","lastTransitionTime":"2026-01-22T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.481167 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.481208 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.481218 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.481233 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.481242 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:18Z","lastTransitionTime":"2026-01-22T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.583230 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.583267 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.583277 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.583291 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.583302 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:18Z","lastTransitionTime":"2026-01-22T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.685075 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.685133 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.685143 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.685159 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.685169 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:18Z","lastTransitionTime":"2026-01-22T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.787271 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.787300 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.787309 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.787322 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.787330 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:18Z","lastTransitionTime":"2026-01-22T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.889777 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.889820 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.889834 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.889852 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.889866 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:18Z","lastTransitionTime":"2026-01-22T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.991804 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.992015 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.992103 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.992199 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.992259 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:18Z","lastTransitionTime":"2026-01-22T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:18 crc kubenswrapper[5017]: I0122 14:04:18.999961 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vgtf7_43c64840-5206-4dec-834e-77e0562f19de/ovnkube-controller/3.log" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.002760 5017 scope.go:117] "RemoveContainer" containerID="20495ad9747c4b88b78329f029365528560cc625df54d8291d6873b4afec6222" Jan 22 14:04:19 crc kubenswrapper[5017]: E0122 14:04:19.002998 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-vgtf7_openshift-ovn-kubernetes(43c64840-5206-4dec-834e-77e0562f19de)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" podUID="43c64840-5206-4dec-834e-77e0562f19de" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.020898 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
tc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06
Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:19Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.036476 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:19Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.051302 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:19Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.064850 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:19Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.076223 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:19Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.088257 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:19Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.094934 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.094988 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.095001 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.095027 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 
14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.095042 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:19Z","lastTransitionTime":"2026-01-22T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.101069 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-c
ontroller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:19Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.114926 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:19Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.128132 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:19Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.138749 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:19Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.160988 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20495ad9747c4b88b78329f029365528560cc625df54d8291d6873b4afec6222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20495ad9747c4b88b78329f029365528560cc625df54d8291d6873b4afec6222\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:04:17Z\\\",\\\"message\\\":\\\"vices.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"4607c9b7-15f9-4ba0-86e5-0021ba7e4488\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.58\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF0122 14:04:17.012242 7056 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: 
failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:04:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-vgtf7_openshift-ovn-kubernetes(43c64840-5206-4dec-834e-77e0562f19de)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube
-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:19Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.172532 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f96871c-5cdd-496a-9c9b-b669151acffd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44afb8e08394c51727ad8de33a09f37a7dfef401310befe3d65553362af85562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b45106549b9b14cf3935ae87a553a327fc77b924bd793b38a90d7b6ce9725f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvbsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:19Z is after 2025-08-24T17:21:41Z" Jan 22 
14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.185487 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:19Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.197059 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.197138 5017 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.197153 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.197170 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.197181 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:19Z","lastTransitionTime":"2026-01-22T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.198810 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\
":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:19Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.209693 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eddbc32-157f-423c-9d75-9c93c5e988e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64ce1b2fd6d34be7d6a188ba0958773d3e5c61f1d49aeeaf74b8b25be9246f39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec1dbc41418ac75e7457e9f7c96ccae5a4f41000133314ec19b09377870b3c1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://821419c228395ceab721be9e601aeffd3f5e24dce
0e095873a2ec4b820b5ac29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:19Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.221028 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f97833420fc482dc5fa1d748caa12701d129b5081056d0a7f8c40050c64cf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:04:13Z\\\",\\\"message\\\":\\\"2026-01-22T14:03:28+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8da2e187-a458-42a7-973c-69aea8e9e634\\\\n2026-01-22T14:03:28+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8da2e187-a458-42a7-973c-69aea8e9e634 to /host/opt/cni/bin/\\\\n2026-01-22T14:03:28Z [verbose] multus-daemon started\\\\n2026-01-22T14:03:28Z [verbose] Readiness Indicator file check\\\\n2026-01-22T14:04:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:04:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:19Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.233533 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:19Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.244547 5017 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-gc29v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bca74e01-1d78-4eeb-b3db-842fda0babd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gc29v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:19Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.257270 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.257317 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.257413 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:19 crc kubenswrapper[5017]: E0122 14:04:19.257417 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:19 crc kubenswrapper[5017]: E0122 14:04:19.257519 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:19 crc kubenswrapper[5017]: E0122 14:04:19.257610 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.259282 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 23:47:51.876937287 +0000 UTC Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.299375 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.299406 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.299416 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.299428 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.299438 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:19Z","lastTransitionTime":"2026-01-22T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.401464 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.401506 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.401517 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.401533 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.401544 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:19Z","lastTransitionTime":"2026-01-22T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.503548 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.503579 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.503588 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.503602 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.503612 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:19Z","lastTransitionTime":"2026-01-22T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.606723 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.606788 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.606802 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.606818 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.606832 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:19Z","lastTransitionTime":"2026-01-22T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.709028 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.709098 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.709134 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.709150 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.709161 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:19Z","lastTransitionTime":"2026-01-22T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.775847 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.775917 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.775929 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.775948 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.775959 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:19Z","lastTransitionTime":"2026-01-22T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:19 crc kubenswrapper[5017]: E0122 14:04:19.788367 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:19Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.791400 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.791440 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.791449 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.791465 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.791475 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:19Z","lastTransitionTime":"2026-01-22T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:19 crc kubenswrapper[5017]: E0122 14:04:19.802649 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:19Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.806344 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.806378 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.806389 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.806403 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.806414 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:19Z","lastTransitionTime":"2026-01-22T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:19 crc kubenswrapper[5017]: E0122 14:04:19.818495 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:19Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.823797 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.823849 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.823861 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.823883 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.823899 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:19Z","lastTransitionTime":"2026-01-22T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:19 crc kubenswrapper[5017]: E0122 14:04:19.839793 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:19Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.844682 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.844712 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.844723 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.844739 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.844749 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:19Z","lastTransitionTime":"2026-01-22T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:19 crc kubenswrapper[5017]: E0122 14:04:19.856744 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:19Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:19 crc kubenswrapper[5017]: E0122 14:04:19.856892 5017 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.858785 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.858815 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.858825 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.858840 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.858852 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:19Z","lastTransitionTime":"2026-01-22T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.961159 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.961205 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.961215 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.961230 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:19 crc kubenswrapper[5017]: I0122 14:04:19.961240 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:19Z","lastTransitionTime":"2026-01-22T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.064836 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.064910 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.064934 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.064963 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.064982 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:20Z","lastTransitionTime":"2026-01-22T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.168456 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.168514 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.168529 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.168548 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.168560 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:20Z","lastTransitionTime":"2026-01-22T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.257549 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:20 crc kubenswrapper[5017]: E0122 14:04:20.257837 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.259665 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 13:25:21.190724954 +0000 UTC Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.271327 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.271380 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.271399 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.271420 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.271437 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:20Z","lastTransitionTime":"2026-01-22T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.374294 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.374364 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.374390 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.374427 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.374451 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:20Z","lastTransitionTime":"2026-01-22T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.477667 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.477800 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.477824 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.477863 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.477890 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:20Z","lastTransitionTime":"2026-01-22T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.581658 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.581723 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.581738 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.581793 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.581810 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:20Z","lastTransitionTime":"2026-01-22T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.689050 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.689452 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.689533 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.689644 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.689719 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:20Z","lastTransitionTime":"2026-01-22T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.793993 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.794062 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.794086 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.794150 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.794172 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:20Z","lastTransitionTime":"2026-01-22T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.896898 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.896957 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.896969 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.896988 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:20 crc kubenswrapper[5017]: I0122 14:04:20.897000 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:20Z","lastTransitionTime":"2026-01-22T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.000867 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.000934 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.000948 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.000965 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.000975 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:21Z","lastTransitionTime":"2026-01-22T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.103419 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.103504 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.103517 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.103536 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.103548 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:21Z","lastTransitionTime":"2026-01-22T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.206368 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.206411 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.206421 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.206438 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.206449 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:21Z","lastTransitionTime":"2026-01-22T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.259359 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.259383 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:21 crc kubenswrapper[5017]: E0122 14:04:21.259628 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:21 crc kubenswrapper[5017]: E0122 14:04:21.259517 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.259387 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:21 crc kubenswrapper[5017]: E0122 14:04:21.259708 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.259747 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 18:45:55.096705521 +0000 UTC Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.308561 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.308671 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.308700 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.308724 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.308740 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:21Z","lastTransitionTime":"2026-01-22T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.411247 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.411290 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.411301 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.411318 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.411329 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:21Z","lastTransitionTime":"2026-01-22T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.514847 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.514900 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.514913 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.514932 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.514946 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:21Z","lastTransitionTime":"2026-01-22T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.617443 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.617494 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.617507 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.617525 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.617538 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:21Z","lastTransitionTime":"2026-01-22T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.720173 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.720221 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.720235 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.720256 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.720269 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:21Z","lastTransitionTime":"2026-01-22T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.822417 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.822461 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.822472 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.822488 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.822500 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:21Z","lastTransitionTime":"2026-01-22T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.925000 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.925385 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.926194 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.926476 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:21 crc kubenswrapper[5017]: I0122 14:04:21.926665 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:21Z","lastTransitionTime":"2026-01-22T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.030160 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.030208 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.030217 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.030230 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.030239 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:22Z","lastTransitionTime":"2026-01-22T14:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.133592 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.133630 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.133639 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.133654 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.133664 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:22Z","lastTransitionTime":"2026-01-22T14:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.235731 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.235767 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.235775 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.235789 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.235799 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:22Z","lastTransitionTime":"2026-01-22T14:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.257473 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:22 crc kubenswrapper[5017]: E0122 14:04:22.257606 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.260675 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 13:21:15.315884684 +0000 UTC Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.338300 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.338369 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.338387 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.338412 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.338430 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:22Z","lastTransitionTime":"2026-01-22T14:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.440869 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.440911 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.440922 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.440941 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.440951 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:22Z","lastTransitionTime":"2026-01-22T14:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.543415 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.543458 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.543470 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.543485 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.543498 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:22Z","lastTransitionTime":"2026-01-22T14:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.645743 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.645798 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.645817 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.645839 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.645857 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:22Z","lastTransitionTime":"2026-01-22T14:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.748620 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.748652 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.748666 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.748684 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.748696 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:22Z","lastTransitionTime":"2026-01-22T14:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.851326 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.851426 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.851445 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.851475 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.851492 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:22Z","lastTransitionTime":"2026-01-22T14:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.954205 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.954309 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.954318 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.954330 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:22 crc kubenswrapper[5017]: I0122 14:04:22.954339 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:22Z","lastTransitionTime":"2026-01-22T14:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.056918 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.056981 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.057008 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.057037 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.057060 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:23Z","lastTransitionTime":"2026-01-22T14:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.159742 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.159787 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.159798 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.159813 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.159826 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:23Z","lastTransitionTime":"2026-01-22T14:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.257068 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.257068 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:23 crc kubenswrapper[5017]: E0122 14:04:23.257234 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:23 crc kubenswrapper[5017]: E0122 14:04:23.257305 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.257381 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:23 crc kubenswrapper[5017]: E0122 14:04:23.257573 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.260763 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 21:22:49.289668139 +0000 UTC Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.261683 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.261712 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.261723 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.261739 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.261751 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:23Z","lastTransitionTime":"2026-01-22T14:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.363798 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.363831 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.363841 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.363856 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.363866 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:23Z","lastTransitionTime":"2026-01-22T14:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.466053 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.466103 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.466127 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.466149 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.466160 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:23Z","lastTransitionTime":"2026-01-22T14:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.568698 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.568736 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.568749 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.568765 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.568778 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:23Z","lastTransitionTime":"2026-01-22T14:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.671503 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.671534 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.671544 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.671557 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.671567 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:23Z","lastTransitionTime":"2026-01-22T14:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.774238 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.774287 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.774302 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.774324 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.774339 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:23Z","lastTransitionTime":"2026-01-22T14:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.876795 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.876835 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.876848 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.876866 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.876876 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:23Z","lastTransitionTime":"2026-01-22T14:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.978989 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.979032 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.979042 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.979056 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:23 crc kubenswrapper[5017]: I0122 14:04:23.979064 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:23Z","lastTransitionTime":"2026-01-22T14:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.081313 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.081363 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.081375 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.081391 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.081402 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:24Z","lastTransitionTime":"2026-01-22T14:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.183509 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.183542 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.183550 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.183567 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.183584 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:24Z","lastTransitionTime":"2026-01-22T14:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.256947 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:24 crc kubenswrapper[5017]: E0122 14:04:24.257106 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.261024 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 08:08:24.021750503 +0000 UTC Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.286198 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.286243 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.286255 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.286299 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.286313 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:24Z","lastTransitionTime":"2026-01-22T14:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.388701 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.388748 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.388758 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.388771 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.388779 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:24Z","lastTransitionTime":"2026-01-22T14:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.490963 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.491035 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.491059 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.491090 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.491160 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:24Z","lastTransitionTime":"2026-01-22T14:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.594028 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.594069 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.594080 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.594097 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.594131 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:24Z","lastTransitionTime":"2026-01-22T14:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.696956 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.697003 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.697011 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.697029 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.697039 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:24Z","lastTransitionTime":"2026-01-22T14:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.800043 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.800103 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.800157 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.800178 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.800191 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:24Z","lastTransitionTime":"2026-01-22T14:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.902883 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.902922 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.902931 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.902946 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:24 crc kubenswrapper[5017]: I0122 14:04:24.902955 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:24Z","lastTransitionTime":"2026-01-22T14:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.005782 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.005831 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.005843 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.005860 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.005871 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:25Z","lastTransitionTime":"2026-01-22T14:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.107875 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.107937 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.107949 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.107965 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.107974 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:25Z","lastTransitionTime":"2026-01-22T14:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.209829 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.209892 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.209904 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.209919 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.209930 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:25Z","lastTransitionTime":"2026-01-22T14:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.257363 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.257510 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:25 crc kubenswrapper[5017]: E0122 14:04:25.257626 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.257822 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:25 crc kubenswrapper[5017]: E0122 14:04:25.257903 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:25 crc kubenswrapper[5017]: E0122 14:04:25.258167 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.261389 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 14:49:42.123948919 +0000 UTC Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.277951 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117
b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.292689 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.304918 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.311662 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.311704 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.311715 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.311732 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.311743 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:25Z","lastTransitionTime":"2026-01-22T14:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.316634 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.325543 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.338484 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.359981 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.376891 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.391172 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.404219 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.413439 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.413488 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.413498 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.413512 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.413522 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:25Z","lastTransitionTime":"2026-01-22T14:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.425752 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20495ad9747c4b88b78329f029365528560cc625
df54d8291d6873b4afec6222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20495ad9747c4b88b78329f029365528560cc625df54d8291d6873b4afec6222\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:04:17Z\\\",\\\"message\\\":\\\"vices.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"4607c9b7-15f9-4ba0-86e5-0021ba7e4488\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.58\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF0122 14:04:17.012242 7056 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:04:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vgtf7_openshift-ovn-kubernetes(43c64840-5206-4dec-834e-77e0562f19de)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.435597 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f96871c-5cdd-496a-9c9b-b669151acffd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44afb8e08394c51727ad8de33a09f37a7dfef401310befe3d65553362af85562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b45106549b9b14cf3935ae87a553a327fc77b924bd793b38a90d7b6ce9725f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvbsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.446312 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.461502 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.475352 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eddbc32-157f-423c-9d75-9c93c5e988e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64ce1b2fd6d34be7d6a188ba0958773d3e5c61f1d49aeeaf74b8b25be9246f39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec1dbc41418ac75e7457e9f7c96ccae5a4f41000133314ec19b09377870b3c1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://821419c228395ceab721be9e601aeffd3f5e24dce0e095873a2ec4b820b5ac29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.491521 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f97833420fc482dc5fa1d748caa12701d129b5081056d0a7f8c40050c64cf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:04:13Z\\\",\\\"message\\\":\\\"2026-01-22T14:03:28+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8da2e187-a458-42a7-973c-69aea8e9e634\\\\n2026-01-22T14:03:28+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8da2e187-a458-42a7-973c-69aea8e9e634 to /host/opt/cni/bin/\\\\n2026-01-22T14:03:28Z [verbose] multus-daemon started\\\\n2026-01-22T14:03:28Z [verbose] Readiness Indicator file check\\\\n2026-01-22T14:04:13Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:04:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.504654 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.516619 5017 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.516673 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.516684 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.516700 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.516709 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:25Z","lastTransitionTime":"2026-01-22T14:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.517412 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gc29v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bca74e01-1d78-4eeb-b3db-842fda0babd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gc29v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.620910 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.620969 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.620990 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.621065 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.621087 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:25Z","lastTransitionTime":"2026-01-22T14:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.723744 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.723788 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.723798 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.723813 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.723822 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:25Z","lastTransitionTime":"2026-01-22T14:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.826859 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.826941 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.826958 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.826975 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.827031 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:25Z","lastTransitionTime":"2026-01-22T14:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.929014 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.929050 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.929058 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.929075 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:25 crc kubenswrapper[5017]: I0122 14:04:25.929087 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:25Z","lastTransitionTime":"2026-01-22T14:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.031579 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.031652 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.031667 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.031684 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.031697 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:26Z","lastTransitionTime":"2026-01-22T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.134199 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.134239 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.134252 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.134270 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.134281 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:26Z","lastTransitionTime":"2026-01-22T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.237019 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.237064 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.237079 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.237096 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.237139 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:26Z","lastTransitionTime":"2026-01-22T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.256867 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:26 crc kubenswrapper[5017]: E0122 14:04:26.257034 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.262096 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 06:57:08.587007754 +0000 UTC Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.341039 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.341217 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.341246 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.341281 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.341306 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:26Z","lastTransitionTime":"2026-01-22T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.444619 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.444652 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.444661 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.444674 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.444684 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:26Z","lastTransitionTime":"2026-01-22T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.547620 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.547669 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.547681 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.547704 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.547717 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:26Z","lastTransitionTime":"2026-01-22T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.649547 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.649641 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.649658 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.649686 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.649703 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:26Z","lastTransitionTime":"2026-01-22T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.751850 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.751889 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.751902 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.751917 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.751927 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:26Z","lastTransitionTime":"2026-01-22T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.853705 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.853752 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.853761 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.853781 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.853791 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:26Z","lastTransitionTime":"2026-01-22T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.956240 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.956291 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.956301 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.956317 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:26 crc kubenswrapper[5017]: I0122 14:04:26.956327 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:26Z","lastTransitionTime":"2026-01-22T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.059240 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.059284 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.059297 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.059312 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.059324 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:27Z","lastTransitionTime":"2026-01-22T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.162228 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.162284 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.162295 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.162313 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.162351 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:27Z","lastTransitionTime":"2026-01-22T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.257216 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.257221 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.257280 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:27 crc kubenswrapper[5017]: E0122 14:04:27.257397 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:27 crc kubenswrapper[5017]: E0122 14:04:27.257492 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:27 crc kubenswrapper[5017]: E0122 14:04:27.257628 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.262693 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 17:52:59.767349755 +0000 UTC Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.264253 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.264281 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.264290 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.264301 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.264310 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:27Z","lastTransitionTime":"2026-01-22T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.366659 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.366960 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.367026 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.367093 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.367202 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:27Z","lastTransitionTime":"2026-01-22T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.469154 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.469186 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.469195 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.469208 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.469228 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:27Z","lastTransitionTime":"2026-01-22T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.572608 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.572663 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.572686 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.572715 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.572737 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:27Z","lastTransitionTime":"2026-01-22T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.676382 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.676427 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.676438 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.676458 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.676472 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:27Z","lastTransitionTime":"2026-01-22T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.778858 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.778894 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.778903 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.778916 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.778924 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:27Z","lastTransitionTime":"2026-01-22T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.880860 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.881294 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.881460 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.881651 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.881799 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:27Z","lastTransitionTime":"2026-01-22T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.985017 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.985064 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.985081 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.985102 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:27 crc kubenswrapper[5017]: I0122 14:04:27.985132 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:27Z","lastTransitionTime":"2026-01-22T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.087340 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.087394 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.087406 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.087424 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.087437 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:28Z","lastTransitionTime":"2026-01-22T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.190225 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.190503 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.190575 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.190699 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.190765 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:28Z","lastTransitionTime":"2026-01-22T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.257458 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:28 crc kubenswrapper[5017]: E0122 14:04:28.257594 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.263621 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 12:58:21.555701723 +0000 UTC Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.283012 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:04:28 crc kubenswrapper[5017]: E0122 14:04:28.283223 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:32.283191781 +0000 UTC m=+147.275653639 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.283409 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:28 crc kubenswrapper[5017]: E0122 14:04:28.283623 5017 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:04:28 crc kubenswrapper[5017]: E0122 14:04:28.283685 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:05:32.283671845 +0000 UTC m=+147.276133703 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.293517 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.293544 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.293554 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.293567 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.293575 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:28Z","lastTransitionTime":"2026-01-22T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.385036 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.385131 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.385157 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:28 crc kubenswrapper[5017]: E0122 14:04:28.385246 5017 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:04:28 crc kubenswrapper[5017]: E0122 14:04:28.385278 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:04:28 crc kubenswrapper[5017]: E0122 14:04:28.385305 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 
nodeName:}" failed. No retries permitted until 2026-01-22 14:05:32.385289184 +0000 UTC m=+147.377751042 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:04:28 crc kubenswrapper[5017]: E0122 14:04:28.385311 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:04:28 crc kubenswrapper[5017]: E0122 14:04:28.385324 5017 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:04:28 crc kubenswrapper[5017]: E0122 14:04:28.385352 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:04:28 crc kubenswrapper[5017]: E0122 14:04:28.385374 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 14:05:32.385358236 +0000 UTC m=+147.377820094 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:04:28 crc kubenswrapper[5017]: E0122 14:04:28.385384 5017 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:04:28 crc kubenswrapper[5017]: E0122 14:04:28.385398 5017 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:04:28 crc kubenswrapper[5017]: E0122 14:04:28.385462 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 14:05:32.385441888 +0000 UTC m=+147.377903746 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.395629 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.395775 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.395840 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.395923 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.395992 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:28Z","lastTransitionTime":"2026-01-22T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.498531 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.498583 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.498594 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.498608 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.498617 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:28Z","lastTransitionTime":"2026-01-22T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.601016 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.601060 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.601071 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.601087 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.601100 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:28Z","lastTransitionTime":"2026-01-22T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.704029 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.704080 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.704092 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.704127 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.704146 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:28Z","lastTransitionTime":"2026-01-22T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.807206 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.807255 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.807267 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.807286 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.807298 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:28Z","lastTransitionTime":"2026-01-22T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.909851 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.909922 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.909939 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.909968 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:28 crc kubenswrapper[5017]: I0122 14:04:28.909984 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:28Z","lastTransitionTime":"2026-01-22T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.012274 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.012319 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.012330 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.012348 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.012361 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:29Z","lastTransitionTime":"2026-01-22T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.114408 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.114648 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.114729 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.114855 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.114958 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:29Z","lastTransitionTime":"2026-01-22T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.216709 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.216751 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.216763 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.216780 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.216792 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:29Z","lastTransitionTime":"2026-01-22T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.257395 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.257423 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:29 crc kubenswrapper[5017]: E0122 14:04:29.257511 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.257571 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:29 crc kubenswrapper[5017]: E0122 14:04:29.257706 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:29 crc kubenswrapper[5017]: E0122 14:04:29.257756 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.264554 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 09:03:22.043931975 +0000 UTC Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.319598 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.319629 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.319638 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.319651 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.319659 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:29Z","lastTransitionTime":"2026-01-22T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.422172 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.422201 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.422209 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.422226 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.422234 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:29Z","lastTransitionTime":"2026-01-22T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.524390 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.524439 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.524450 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.524466 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.524476 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:29Z","lastTransitionTime":"2026-01-22T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.626588 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.626633 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.626643 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.626657 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.626667 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:29Z","lastTransitionTime":"2026-01-22T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.729487 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.729523 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.729534 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.729550 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.729561 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:29Z","lastTransitionTime":"2026-01-22T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.832417 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.832489 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.832508 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.832530 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.832548 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:29Z","lastTransitionTime":"2026-01-22T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.935051 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.935309 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.935368 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.935436 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:29 crc kubenswrapper[5017]: I0122 14:04:29.935495 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:29Z","lastTransitionTime":"2026-01-22T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.038836 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.038870 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.038885 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.038902 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.038913 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:30Z","lastTransitionTime":"2026-01-22T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.108237 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.108279 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.108296 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.108316 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.108327 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:30Z","lastTransitionTime":"2026-01-22T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:30 crc kubenswrapper[5017]: E0122 14:04:30.122328 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.127358 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.127426 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.127436 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.127456 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.127469 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:30Z","lastTransitionTime":"2026-01-22T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:30 crc kubenswrapper[5017]: E0122 14:04:30.140103 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.143594 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.143731 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.143810 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.143879 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.143943 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:30Z","lastTransitionTime":"2026-01-22T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:30 crc kubenswrapper[5017]: E0122 14:04:30.157567 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.160798 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.160849 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.160861 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.160876 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.160885 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:30Z","lastTransitionTime":"2026-01-22T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:30 crc kubenswrapper[5017]: E0122 14:04:30.171424 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.174802 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.174846 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.174857 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.174871 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.174881 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:30Z","lastTransitionTime":"2026-01-22T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:30 crc kubenswrapper[5017]: E0122 14:04:30.188134 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:30 crc kubenswrapper[5017]: E0122 14:04:30.188300 5017 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.190228 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.190370 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.190438 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.190527 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.190626 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:30Z","lastTransitionTime":"2026-01-22T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.257191 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:30 crc kubenswrapper[5017]: E0122 14:04:30.257592 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.265176 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 14:02:53.437551045 +0000 UTC Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.292992 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.293031 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.293040 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.293056 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.293066 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:30Z","lastTransitionTime":"2026-01-22T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.395456 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.395530 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.395542 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.395556 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.395580 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:30Z","lastTransitionTime":"2026-01-22T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.497565 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.497609 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.497619 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.497636 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.497648 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:30Z","lastTransitionTime":"2026-01-22T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.600224 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.600260 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.600272 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.600287 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.600298 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:30Z","lastTransitionTime":"2026-01-22T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.702419 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.702704 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.702784 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.702862 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.702947 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:30Z","lastTransitionTime":"2026-01-22T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.805950 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.806011 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.806024 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.806038 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.806051 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:30Z","lastTransitionTime":"2026-01-22T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.908982 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.909275 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.909394 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.909488 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:30 crc kubenswrapper[5017]: I0122 14:04:30.909580 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:30Z","lastTransitionTime":"2026-01-22T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.011705 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.011751 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.011762 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.011781 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.011794 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:31Z","lastTransitionTime":"2026-01-22T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.115178 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.115222 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.115233 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.115253 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.115265 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:31Z","lastTransitionTime":"2026-01-22T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.217823 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.217884 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.217896 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.217912 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.217922 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:31Z","lastTransitionTime":"2026-01-22T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.257627 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.257685 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:31 crc kubenswrapper[5017]: E0122 14:04:31.257771 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.257805 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:31 crc kubenswrapper[5017]: E0122 14:04:31.257988 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:31 crc kubenswrapper[5017]: E0122 14:04:31.257913 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.265855 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 08:42:20.008353146 +0000 UTC Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.319986 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.320013 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.320021 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.320034 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.320044 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:31Z","lastTransitionTime":"2026-01-22T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.422758 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.423337 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.423425 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.423524 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.423602 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:31Z","lastTransitionTime":"2026-01-22T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.525853 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.525918 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.525931 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.525947 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.525958 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:31Z","lastTransitionTime":"2026-01-22T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.628025 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.628070 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.628082 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.628153 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.628164 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:31Z","lastTransitionTime":"2026-01-22T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.729845 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.729880 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.729888 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.729901 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.729910 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:31Z","lastTransitionTime":"2026-01-22T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.832165 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.832211 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.832222 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.832239 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.832252 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:31Z","lastTransitionTime":"2026-01-22T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.934671 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.934715 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.934726 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.934743 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:31 crc kubenswrapper[5017]: I0122 14:04:31.934756 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:31Z","lastTransitionTime":"2026-01-22T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.036808 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.036847 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.036856 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.036873 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.036881 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:32Z","lastTransitionTime":"2026-01-22T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.139291 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.139317 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.139324 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.139337 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.139347 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:32Z","lastTransitionTime":"2026-01-22T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.243290 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.243367 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.243386 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.243417 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.243436 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:32Z","lastTransitionTime":"2026-01-22T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.257649 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:32 crc kubenswrapper[5017]: E0122 14:04:32.258290 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.266311 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 05:53:13.931602903 +0000 UTC Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.346108 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.346413 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.346480 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.346550 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.346626 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:32Z","lastTransitionTime":"2026-01-22T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.449806 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.449855 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.449864 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.449876 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.449884 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:32Z","lastTransitionTime":"2026-01-22T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.552291 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.552333 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.552345 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.552361 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.552373 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:32Z","lastTransitionTime":"2026-01-22T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.654450 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.654483 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.654491 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.654503 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.654512 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:32Z","lastTransitionTime":"2026-01-22T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.757085 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.757162 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.757175 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.757195 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.757206 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:32Z","lastTransitionTime":"2026-01-22T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.859670 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.859707 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.859716 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.859731 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.859742 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:32Z","lastTransitionTime":"2026-01-22T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.962169 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.962244 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.962257 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.962273 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:32 crc kubenswrapper[5017]: I0122 14:04:32.962285 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:32Z","lastTransitionTime":"2026-01-22T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.064741 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.064780 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.064790 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.064806 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.064818 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:33Z","lastTransitionTime":"2026-01-22T14:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.166938 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.166980 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.166996 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.167013 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.167023 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:33Z","lastTransitionTime":"2026-01-22T14:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.257361 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.257416 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.257422 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:33 crc kubenswrapper[5017]: E0122 14:04:33.257583 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:33 crc kubenswrapper[5017]: E0122 14:04:33.257720 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:33 crc kubenswrapper[5017]: E0122 14:04:33.257790 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.267261 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 11:25:51.56737467 +0000 UTC Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.268725 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.268781 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.268795 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.268814 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.268825 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:33Z","lastTransitionTime":"2026-01-22T14:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.370998 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.371036 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.371048 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.371070 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.371083 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:33Z","lastTransitionTime":"2026-01-22T14:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.472804 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.473078 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.473192 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.473270 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.473337 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:33Z","lastTransitionTime":"2026-01-22T14:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.575993 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.576031 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.576044 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.576062 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.576072 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:33Z","lastTransitionTime":"2026-01-22T14:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.679206 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.679268 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.679483 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.679511 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.679532 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:33Z","lastTransitionTime":"2026-01-22T14:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.781592 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.781628 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.781636 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.781649 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.781676 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:33Z","lastTransitionTime":"2026-01-22T14:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.884087 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.884422 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.884546 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.885064 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.885365 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:33Z","lastTransitionTime":"2026-01-22T14:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.988037 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.988088 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.988097 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.988131 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:33 crc kubenswrapper[5017]: I0122 14:04:33.988141 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:33Z","lastTransitionTime":"2026-01-22T14:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.090988 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.091057 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.091080 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.091142 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.091167 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:34Z","lastTransitionTime":"2026-01-22T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.193547 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.193586 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.193595 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.193611 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.193621 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:34Z","lastTransitionTime":"2026-01-22T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.256879 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:34 crc kubenswrapper[5017]: E0122 14:04:34.257027 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.257661 5017 scope.go:117] "RemoveContainer" containerID="20495ad9747c4b88b78329f029365528560cc625df54d8291d6873b4afec6222" Jan 22 14:04:34 crc kubenswrapper[5017]: E0122 14:04:34.257841 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-vgtf7_openshift-ovn-kubernetes(43c64840-5206-4dec-834e-77e0562f19de)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" podUID="43c64840-5206-4dec-834e-77e0562f19de" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.268064 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 07:24:38.306676429 +0000 UTC Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.295907 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.295941 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.295950 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.295965 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.295975 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:34Z","lastTransitionTime":"2026-01-22T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.397889 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.397932 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.397944 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.397962 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.397973 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:34Z","lastTransitionTime":"2026-01-22T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.500634 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.500734 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.500756 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.500772 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.500783 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:34Z","lastTransitionTime":"2026-01-22T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.603666 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.603707 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.603716 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.603730 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.603739 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:34Z","lastTransitionTime":"2026-01-22T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.705765 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.705816 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.705830 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.705849 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.705865 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:34Z","lastTransitionTime":"2026-01-22T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.808065 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.808148 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.808166 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.808190 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.808207 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:34Z","lastTransitionTime":"2026-01-22T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.910582 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.910615 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.910623 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.910636 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:34 crc kubenswrapper[5017]: I0122 14:04:34.910645 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:34Z","lastTransitionTime":"2026-01-22T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.013453 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.013495 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.013513 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.013548 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.013567 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:35Z","lastTransitionTime":"2026-01-22T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.115422 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.115475 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.115487 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.115501 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.115526 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:35Z","lastTransitionTime":"2026-01-22T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.217438 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.217494 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.217511 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.217534 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.217550 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:35Z","lastTransitionTime":"2026-01-22T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.256827 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.256949 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.256949 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:35 crc kubenswrapper[5017]: E0122 14:04:35.257197 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:35 crc kubenswrapper[5017]: E0122 14:04:35.257355 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:35 crc kubenswrapper[5017]: E0122 14:04:35.257594 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.268818 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 02:09:56.366025722 +0000 UTC Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.275599 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41
ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.293841 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\
",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceac
count\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.313428 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eddbc32-157f-423c-9d75-9c93c5e988e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64ce1b2fd6d34be7d6a188ba0958773d3e5c61f1d49aeeaf74b8b25be9246f39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec1dbc41418ac75e7457e9f7c96ccae5a4f41000133314ec19b09377870b3c1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-ce
rts\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://821419c228395ceab721be9e601aeffd3f5e24dce0e095873a2ec4b820b5ac29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.319492 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.319544 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.319557 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.319578 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.319591 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:35Z","lastTransitionTime":"2026-01-22T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.335390 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f97833420fc482dc5fa1d748caa12701d129b5081056d0a7f8c40050c64cf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:04:13Z\\\",\\\"message\\\":\\\"2026-01-22T14:03:28+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8da2e187-a458-42a7-973c-69aea8e9e634\\\\n2026-01-22T14:03:28+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8da2e187-a458-42a7-973c-69aea8e9e634 to /host/opt/cni/bin/\\\\n2026-01-22T14:03:28Z [verbose] multus-daemon started\\\\n2026-01-22T14:03:28Z [verbose] Readiness Indicator file check\\\\n2026-01-22T14:04:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:04:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.358070 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.371872 5017 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-gc29v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bca74e01-1d78-4eeb-b3db-842fda0babd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gc29v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.392265 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655
ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.404409 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.415299 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.421732 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.421835 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.421893 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.421968 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.422024 5017 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:35Z","lastTransitionTime":"2026-01-22T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.426687 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.437411 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.453064 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.465985 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.479472 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.491741 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.500695 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.517289 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20495ad9747c4b88b78329f029365528560cc625df54d8291d6873b4afec6222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20495ad9747c4b88b78329f029365528560cc625df54d8291d6873b4afec6222\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:04:17Z\\\",\\\"message\\\":\\\"vices.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"4607c9b7-15f9-4ba0-86e5-0021ba7e4488\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.58\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF0122 14:04:17.012242 7056 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: 
failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:04:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-vgtf7_openshift-ovn-kubernetes(43c64840-5206-4dec-834e-77e0562f19de)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube
-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.524880 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.524964 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.524978 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.524998 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.525007 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:35Z","lastTransitionTime":"2026-01-22T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.527403 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f96871c-5cdd-496a-9c9b-b669151acffd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44afb8e08394c51727ad8de33a09f37a7dfef401310befe3d65553362af85562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b45106549b9b14cf3935ae87a553a327fc77b924bd793b38a90d7b6ce9725f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvbsd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.627519 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.627765 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.627974 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.628181 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.628312 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:35Z","lastTransitionTime":"2026-01-22T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.731134 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.731462 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.731562 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.731664 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.731767 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:35Z","lastTransitionTime":"2026-01-22T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.834528 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.834565 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.834577 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.834593 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.834606 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:35Z","lastTransitionTime":"2026-01-22T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.936344 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.936715 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.936800 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.936894 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:35 crc kubenswrapper[5017]: I0122 14:04:35.936976 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:35Z","lastTransitionTime":"2026-01-22T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.040304 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.040344 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.040353 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.040389 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.040400 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:36Z","lastTransitionTime":"2026-01-22T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.142705 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.142768 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.142779 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.142795 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.142806 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:36Z","lastTransitionTime":"2026-01-22T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.246197 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.246233 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.246242 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.246256 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.246269 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:36Z","lastTransitionTime":"2026-01-22T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.257405 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:36 crc kubenswrapper[5017]: E0122 14:04:36.257505 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.270589 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 19:42:51.11488989 +0000 UTC Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.270940 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.349652 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.349703 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.349716 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.349733 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.350069 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:36Z","lastTransitionTime":"2026-01-22T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.453195 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.453232 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.453244 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.453260 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.453273 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:36Z","lastTransitionTime":"2026-01-22T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.555298 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.555359 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.555373 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.555394 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.555408 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:36Z","lastTransitionTime":"2026-01-22T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.657909 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.657967 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.657980 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.658007 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.658022 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:36Z","lastTransitionTime":"2026-01-22T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.760932 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.760974 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.760986 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.761003 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.761014 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:36Z","lastTransitionTime":"2026-01-22T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.862847 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.862888 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.862902 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.862918 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.862927 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:36Z","lastTransitionTime":"2026-01-22T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.966655 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.966696 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.966939 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.966992 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:36 crc kubenswrapper[5017]: I0122 14:04:36.967004 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:36Z","lastTransitionTime":"2026-01-22T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.070194 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.070255 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.070274 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.070294 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.070313 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:37Z","lastTransitionTime":"2026-01-22T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.173039 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.173086 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.173096 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.173124 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.173136 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:37Z","lastTransitionTime":"2026-01-22T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.257178 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.257188 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:37 crc kubenswrapper[5017]: E0122 14:04:37.257464 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:37 crc kubenswrapper[5017]: E0122 14:04:37.257609 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.257195 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:37 crc kubenswrapper[5017]: E0122 14:04:37.257695 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.271319 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 07:55:25.814612356 +0000 UTC Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.274930 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.274974 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.274986 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.275002 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.275012 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:37Z","lastTransitionTime":"2026-01-22T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.377381 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.377432 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.377444 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.377463 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.377478 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:37Z","lastTransitionTime":"2026-01-22T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.479343 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.479377 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.479386 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.479398 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.479407 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:37Z","lastTransitionTime":"2026-01-22T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.583222 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.583275 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.583287 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.583304 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.583316 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:37Z","lastTransitionTime":"2026-01-22T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.685154 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.685195 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.685208 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.685224 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.685235 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:37Z","lastTransitionTime":"2026-01-22T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.787377 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.787404 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.787411 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.787424 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.787433 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:37Z","lastTransitionTime":"2026-01-22T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.890030 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.890078 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.890086 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.890102 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.890130 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:37Z","lastTransitionTime":"2026-01-22T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.992783 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.992826 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.992839 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.992886 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:37 crc kubenswrapper[5017]: I0122 14:04:37.993003 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:37Z","lastTransitionTime":"2026-01-22T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.095350 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.095393 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.095404 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.095417 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.095426 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:38Z","lastTransitionTime":"2026-01-22T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.197686 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.197741 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.197752 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.197770 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.197781 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:38Z","lastTransitionTime":"2026-01-22T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.256669 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:38 crc kubenswrapper[5017]: E0122 14:04:38.256823 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.271459 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 10:27:10.412861754 +0000 UTC Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.299619 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.299668 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.299679 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.299695 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.299706 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:38Z","lastTransitionTime":"2026-01-22T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.402537 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.402581 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.402594 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.402611 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.402624 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:38Z","lastTransitionTime":"2026-01-22T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.505777 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.505821 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.505830 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.505848 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.505858 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:38Z","lastTransitionTime":"2026-01-22T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.608671 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.608752 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.608775 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.608805 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.608827 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:38Z","lastTransitionTime":"2026-01-22T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.710806 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.710855 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.710866 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.710884 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.710896 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:38Z","lastTransitionTime":"2026-01-22T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.813446 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.813500 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.813511 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.813530 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.813545 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:38Z","lastTransitionTime":"2026-01-22T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.915861 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.915907 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.915918 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.915934 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:38 crc kubenswrapper[5017]: I0122 14:04:38.915947 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:38Z","lastTransitionTime":"2026-01-22T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.018036 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.018078 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.018086 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.018102 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.018134 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:39Z","lastTransitionTime":"2026-01-22T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.120915 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.120952 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.120960 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.120973 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.120982 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:39Z","lastTransitionTime":"2026-01-22T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.222974 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.223020 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.223032 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.223050 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.223062 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:39Z","lastTransitionTime":"2026-01-22T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.256785 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.256794 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:39 crc kubenswrapper[5017]: E0122 14:04:39.256948 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.256801 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:39 crc kubenswrapper[5017]: E0122 14:04:39.256987 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:39 crc kubenswrapper[5017]: E0122 14:04:39.257052 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.272091 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 22:58:13.570170907 +0000 UTC Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.325506 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.325598 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.325615 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.325631 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.325643 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:39Z","lastTransitionTime":"2026-01-22T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.427716 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.427804 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.427814 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.427828 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.427838 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:39Z","lastTransitionTime":"2026-01-22T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.530353 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.530398 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.530410 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.530427 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.530439 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:39Z","lastTransitionTime":"2026-01-22T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.633527 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.633567 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.633578 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.633618 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.633627 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:39Z","lastTransitionTime":"2026-01-22T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.735609 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.735672 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.735693 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.735721 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.735743 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:39Z","lastTransitionTime":"2026-01-22T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.837988 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.838031 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.838039 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.838052 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.838062 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:39Z","lastTransitionTime":"2026-01-22T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.940816 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.940862 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.940872 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.940886 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:39 crc kubenswrapper[5017]: I0122 14:04:39.940894 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:39Z","lastTransitionTime":"2026-01-22T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.043597 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.043645 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.043664 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.043698 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.043720 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:40Z","lastTransitionTime":"2026-01-22T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.146612 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.146654 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.146662 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.146676 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.146685 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:40Z","lastTransitionTime":"2026-01-22T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.249883 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.249938 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.249947 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.249961 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.249970 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:40Z","lastTransitionTime":"2026-01-22T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.257065 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:40 crc kubenswrapper[5017]: E0122 14:04:40.257192 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.273164 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 11:19:24.384746517 +0000 UTC Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.351985 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.352028 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.352040 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.352057 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.352068 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:40Z","lastTransitionTime":"2026-01-22T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.454541 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.454589 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.454600 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.454617 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.454637 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:40Z","lastTransitionTime":"2026-01-22T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.459188 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.459255 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.459281 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.459312 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.459348 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:40Z","lastTransitionTime":"2026-01-22T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:40 crc kubenswrapper[5017]: E0122 14:04:40.478338 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:40Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.485567 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.485641 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.485681 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.485715 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.485756 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:40Z","lastTransitionTime":"2026-01-22T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:40 crc kubenswrapper[5017]: E0122 14:04:40.505546 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:40Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.509793 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.509852 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.509864 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.509881 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.509917 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:40Z","lastTransitionTime":"2026-01-22T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:40 crc kubenswrapper[5017]: E0122 14:04:40.527071 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:40Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.531149 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.531220 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.531232 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.531249 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.531259 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:40Z","lastTransitionTime":"2026-01-22T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:40 crc kubenswrapper[5017]: E0122 14:04:40.543247 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:40Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.547486 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.547549 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.547562 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.547580 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.547591 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:40Z","lastTransitionTime":"2026-01-22T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:40 crc kubenswrapper[5017]: E0122 14:04:40.561356 5017 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"ff9e96f4-5103-4727-9fe8-65027bb65e8c\\\",\\\"systemUUID\\\":\\\"ab3ea0ca-6acb-4547-97ac-1ee32c5dfb84\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:40Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:40 crc kubenswrapper[5017]: E0122 14:04:40.561546 5017 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.563597 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.563682 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.563713 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.563733 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.563744 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:40Z","lastTransitionTime":"2026-01-22T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.666216 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.666259 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.666318 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.666333 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.666342 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:40Z","lastTransitionTime":"2026-01-22T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.768924 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.768968 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.768980 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.768998 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.769009 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:40Z","lastTransitionTime":"2026-01-22T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.871788 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.871860 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.871879 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.871926 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.871940 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:40Z","lastTransitionTime":"2026-01-22T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.974273 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.974342 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.974361 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.974379 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:40 crc kubenswrapper[5017]: I0122 14:04:40.974392 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:40Z","lastTransitionTime":"2026-01-22T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.076868 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.076926 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.076939 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.076959 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.076973 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:41Z","lastTransitionTime":"2026-01-22T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.180040 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.180086 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.180097 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.180131 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.180144 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:41Z","lastTransitionTime":"2026-01-22T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.257244 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.257358 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.257252 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:41 crc kubenswrapper[5017]: E0122 14:04:41.257474 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:41 crc kubenswrapper[5017]: E0122 14:04:41.257741 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:41 crc kubenswrapper[5017]: E0122 14:04:41.257983 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.273617 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 07:48:02.855576359 +0000 UTC Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.282530 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.282569 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.282583 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.282599 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.282610 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:41Z","lastTransitionTime":"2026-01-22T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.384874 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.384916 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.384924 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.384938 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.384948 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:41Z","lastTransitionTime":"2026-01-22T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.488610 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.488652 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.488662 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.488676 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.488688 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:41Z","lastTransitionTime":"2026-01-22T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.590994 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.591063 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.591076 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.591094 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.591107 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:41Z","lastTransitionTime":"2026-01-22T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.693256 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.693310 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.693322 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.693340 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.693353 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:41Z","lastTransitionTime":"2026-01-22T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.795970 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.796006 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.796014 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.796026 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.796036 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:41Z","lastTransitionTime":"2026-01-22T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.898028 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.898070 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.898081 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.898097 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:41 crc kubenswrapper[5017]: I0122 14:04:41.898106 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:41Z","lastTransitionTime":"2026-01-22T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.000458 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.000525 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.000536 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.000552 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.000567 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:42Z","lastTransitionTime":"2026-01-22T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.102991 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.103032 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.103040 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.103055 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.103064 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:42Z","lastTransitionTime":"2026-01-22T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.205367 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.205422 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.205430 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.205444 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.205454 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:42Z","lastTransitionTime":"2026-01-22T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.256912 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:42 crc kubenswrapper[5017]: E0122 14:04:42.257060 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.274307 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 08:54:59.613413462 +0000 UTC Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.307055 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.307102 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.307133 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.307150 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.307163 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:42Z","lastTransitionTime":"2026-01-22T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.409961 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.410016 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.410027 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.410043 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.410056 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:42Z","lastTransitionTime":"2026-01-22T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.512392 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.512461 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.512473 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.512490 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.512503 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:42Z","lastTransitionTime":"2026-01-22T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.616653 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.616695 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.616704 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.616719 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.616733 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:42Z","lastTransitionTime":"2026-01-22T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.719082 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.719158 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.719172 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.719188 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.719200 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:42Z","lastTransitionTime":"2026-01-22T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.821825 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.821873 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.821886 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.821902 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.821911 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:42Z","lastTransitionTime":"2026-01-22T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.924386 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.924441 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.924452 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.924472 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:42 crc kubenswrapper[5017]: I0122 14:04:42.924482 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:42Z","lastTransitionTime":"2026-01-22T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.027222 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.027266 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.027279 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.027297 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.027309 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:43Z","lastTransitionTime":"2026-01-22T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.129512 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.129541 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.129550 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.129563 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.129572 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:43Z","lastTransitionTime":"2026-01-22T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.231382 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.231427 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.231438 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.231455 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.231465 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:43Z","lastTransitionTime":"2026-01-22T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.256897 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.256961 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.256968 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:43 crc kubenswrapper[5017]: E0122 14:04:43.257037 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:43 crc kubenswrapper[5017]: E0122 14:04:43.257145 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:43 crc kubenswrapper[5017]: E0122 14:04:43.257316 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.274653 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 06:17:44.475185004 +0000 UTC Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.332813 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.332852 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.332863 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.332879 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.332890 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:43Z","lastTransitionTime":"2026-01-22T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.435009 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.435065 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.435078 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.435098 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.435142 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:43Z","lastTransitionTime":"2026-01-22T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.537709 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.537767 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.537778 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.537794 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.537808 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:43Z","lastTransitionTime":"2026-01-22T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.640516 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.640562 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.640571 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.640585 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.640597 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:43Z","lastTransitionTime":"2026-01-22T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.743296 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.743353 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.743364 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.743379 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.743390 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:43Z","lastTransitionTime":"2026-01-22T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.836993 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs\") pod \"network-metrics-daemon-gc29v\" (UID: \"bca74e01-1d78-4eeb-b3db-842fda0babd7\") " pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:43 crc kubenswrapper[5017]: E0122 14:04:43.837210 5017 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:04:43 crc kubenswrapper[5017]: E0122 14:04:43.837271 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs podName:bca74e01-1d78-4eeb-b3db-842fda0babd7 nodeName:}" failed. No retries permitted until 2026-01-22 14:05:47.837256214 +0000 UTC m=+162.829718072 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs") pod "network-metrics-daemon-gc29v" (UID: "bca74e01-1d78-4eeb-b3db-842fda0babd7") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.845541 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.845587 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.845596 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.845612 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.845621 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:43Z","lastTransitionTime":"2026-01-22T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.948765 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.948839 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.948863 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.948893 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:43 crc kubenswrapper[5017]: I0122 14:04:43.948916 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:43Z","lastTransitionTime":"2026-01-22T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.051882 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.052076 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.052143 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.052176 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.052200 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:44Z","lastTransitionTime":"2026-01-22T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.155046 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.155103 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.155131 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.155150 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.155164 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:44Z","lastTransitionTime":"2026-01-22T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.256950 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:44 crc kubenswrapper[5017]: E0122 14:04:44.257184 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.258018 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.258074 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.258087 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.258099 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.258128 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:44Z","lastTransitionTime":"2026-01-22T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.275496 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 18:59:10.723156222 +0000 UTC Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.360210 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.360252 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.360263 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.360278 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.360289 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:44Z","lastTransitionTime":"2026-01-22T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.462908 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.462943 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.462951 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.462983 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.462994 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:44Z","lastTransitionTime":"2026-01-22T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.565052 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.565101 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.565131 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.565148 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.565157 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:44Z","lastTransitionTime":"2026-01-22T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.667446 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.667490 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.667505 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.667520 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.667533 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:44Z","lastTransitionTime":"2026-01-22T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.770177 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.770228 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.770246 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.770270 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.770287 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:44Z","lastTransitionTime":"2026-01-22T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.872548 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.872589 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.872597 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.872613 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.872623 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:44Z","lastTransitionTime":"2026-01-22T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.974700 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.974746 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.974759 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.974774 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:44 crc kubenswrapper[5017]: I0122 14:04:44.974783 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:44Z","lastTransitionTime":"2026-01-22T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.077337 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.077386 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.077399 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.077444 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.077465 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:45Z","lastTransitionTime":"2026-01-22T14:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.179662 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.179697 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.179708 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.179724 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.179734 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:45Z","lastTransitionTime":"2026-01-22T14:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.257768 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.257828 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.257850 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:45 crc kubenswrapper[5017]: E0122 14:04:45.257937 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:45 crc kubenswrapper[5017]: E0122 14:04:45.257988 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:45 crc kubenswrapper[5017]: E0122 14:04:45.258059 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.271232 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d5kwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"075aa74c-f517-49be-9043-c345ecbff66e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c24ed3d79e2898bfbf0d59d55fee76117dbb1405bc0eea4fc77f2932472e180b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6949n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d5kwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.275869 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 04:16:09.910504455 +0000 UTC Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.281517 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.281539 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.281547 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.281560 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.281569 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:45Z","lastTransitionTime":"2026-01-22T14:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.291349 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"43c64840-5206-4dec-834e-77e0562f19de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20495ad9747c4b88b78329f029365528560cc625
df54d8291d6873b4afec6222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20495ad9747c4b88b78329f029365528560cc625df54d8291d6873b4afec6222\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:04:17Z\\\",\\\"message\\\":\\\"vices.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"4607c9b7-15f9-4ba0-86e5-0021ba7e4488\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.58\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF0122 14:04:17.012242 7056 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:04:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vgtf7_openshift-ovn-kubernetes(43c64840-5206-4dec-834e-77e0562f19de)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lx5q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vgtf7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.302888 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f96871c-5cdd-496a-9c9b-b669151acffd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44afb8e08394c51727ad8de33a09f37a7dfef401310befe3d65553362af85562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b45106549b9b14cf3935ae87a553a327fc77b924bd793b38a90d7b6ce9725f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x7c8n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hvbsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.316592 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d69195ba-e14d-4a7b-bb50-d2c0bbd0283e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.329047 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37c7dc72-f9d0-465e-959d-d311370747da\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0471f5086f6dcfc8a8631857ac15c60ac9c9b869c75d347268cafe0da948b9e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cdda6ddd193cbeaac23f1f8bd3f916052f172bd0f1f9c4d3981d5de59b07267\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae568be47a07538d3154840b7bb239fb2701c10d64fa0480c592e2fcf2a17c4a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.341215 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.354536 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.366227 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bcf7359cf7f90888adeabf341c56db7675bcf00922f435fe1b3125e71c4df8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a707dc28159e174daf973d12250b50ac2603f3bf5f77631abb304adc8962e03f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.379338 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pjvjs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"955e4d61-cf5f-4df9-8b71-540ac4dd5856\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27cc996ad3309e591a2bc3aa44c448395bb9255b0a33955a7938a76d6fd2149d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a4fa52e3df467c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e13280c702237c834b95b5a3b4f0d37afcf7c743cb9c2d1f9a
4fa52e3df467c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3853412fd73386f905f919b3da6ba60aaf857e68a0a1c8f4df17bb37b619de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f554d2b3f6497bc362f5895ee8ec1a69acce775dfc8690e2341716990b2af82e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"image\\\":\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e95a181dd44f2739406f2560ca357d6c97ed8ddba3ac994ee80e7796c9bb6d00\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5d4446d1639039b85ede01a198b2c4e171889da909cdedf323ff8167f2ed20b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40b0ba32cd8c580e6918772d0a8d3ae98f0013bdc581f7190141604229625373\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4cvx
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pjvjs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.383769 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.383800 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.383810 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.383823 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.383834 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:45Z","lastTransitionTime":"2026-01-22T14:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.388242 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gc29v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bca74e01-1d78-4eeb-b3db-842fda0babd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wnfsn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gc29v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.398652 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eddbc32-157f-423c-9d75-9c93c5e988e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64ce1b2fd6d34be7d6a188ba0958773d3e5c61f1d49aeeaf74b8b25be9246f39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec1dbc41418ac75e7457e9f7c96ccae5a4f41000133314ec19b09377870b3c1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://821419c228395ceab721be9e601aeffd3f5e24dce0e095873a2ec4b820b5ac29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e458a0688f1060c001f26d0252b2d96df7301a77b6ddf99a21688999d611811\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.407279 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e76fcdc1-2b4d-4698-94e2-7005db05d257\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d89a1583ab3d460d7a746814667e90ce611803586438aa8e9023b84d422d6c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba454cfca55d3a8ac6fc47a6635a7b89e8271441bebf7df2686387561d60e580\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba454cfca55d3a8ac6fc47a6635a7b89e8271441bebf7df2686387561d60e580\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.418729 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j2qmt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c223e7fe-8360-4731-a32d-3cf60ed7324b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f97833420fc482dc5fa1d748caa12701d129b5081056d0a7f8c40050c64cf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:04:13Z\\\",\\\"message\\\":\\\"2026-01-22T14:03:28+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8da2e187-a458-42a7-973c-69aea8e9e634\\\\n2026-01-22T14:03:28+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8da2e187-a458-42a7-973c-69aea8e9e634 to /host/opt/cni/bin/\\\\n2026-01-22T14:03:28Z [verbose] multus-daemon started\\\\n2026-01-22T14:03:28Z [verbose] Readiness Indicator file check\\\\n2026-01-22T14:04:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:04:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r5skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j2qmt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.429699 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f43f877-4d1f-4041-af1f-39a962f7ac0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65a95fd24fe61706e1e732dae09eba79ab50c2f1081f4f6d2a6737adc173a9f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-cr62h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.439383 5017 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-6m84c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a468d1c-9b98-4126-a4b3-fa1af1a19768\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aebb1acd711350afc7a190194f5aed62c4dcde3b482e34c4d99aaf2870950fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lm6h5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6m84c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.458210 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dcd93daf-5cf9-4883-a9f5-57cef67d8dce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac8597c037a57ed00db3e89041d7b5cdf8debd0f1f2f66ebb73328d9a153daa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://794891617dfe349c4aa6dba8fba89306a62d3479ce352cf4121a3a773b586fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e2978d0374e1d4125253ee9cd0ca4dfd5c33098fe5d0579e084ca4793fc29cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc053db203a46c2a0f0e934df38f20523f9f655
ae2227d7759442d4071228b8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be32f0172eb852865ffb97bbbf8af93595597ff37b12ce7154d5ee95afd60ae2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ccd57ca0e87a4355a755aba86ea21b13445ce363a421405bb86d0c21f6a415b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e05681c98edf8f08fc2474f511fa30d2cab946f9c9c9dba9e31f0afef47ca7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:07Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b1c40cc3347d1c23ecd2660bc339a2bd697624de7febb9fa25afd1a9486ed6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:03:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:03:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:03:05Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.469804 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87877154cf4149bd23069ec681f6ce1fd0120d339c2bc582ee1bbd2c3215be4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.480320 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.485734 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.485775 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.485789 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.485808 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.485823 5017 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:45Z","lastTransitionTime":"2026-01-22T14:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.491435 5017 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:03:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8779e8a235e62a539a2e7f62d93c879f2af4bf547aa1f55e54c78b7905dfef01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:03:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:04:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.587972 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.588013 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.588024 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.588040 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.588051 5017 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:45Z","lastTransitionTime":"2026-01-22T14:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.689606 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.689682 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.689697 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.689715 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.689727 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:45Z","lastTransitionTime":"2026-01-22T14:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.792195 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.792227 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.792236 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.792249 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.792257 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:45Z","lastTransitionTime":"2026-01-22T14:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.894505 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.894563 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.894577 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.894596 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.894607 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:45Z","lastTransitionTime":"2026-01-22T14:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.997081 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.997138 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.997149 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.997165 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:45 crc kubenswrapper[5017]: I0122 14:04:45.997194 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:45Z","lastTransitionTime":"2026-01-22T14:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.099574 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.099627 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.099636 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.099652 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.099662 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:46Z","lastTransitionTime":"2026-01-22T14:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.202673 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.202723 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.202736 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.202754 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.202765 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:46Z","lastTransitionTime":"2026-01-22T14:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.257350 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:46 crc kubenswrapper[5017]: E0122 14:04:46.257521 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.276725 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 22:53:45.294833227 +0000 UTC Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.308386 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.308897 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.309175 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.309194 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.309204 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:46Z","lastTransitionTime":"2026-01-22T14:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.411243 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.411275 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.411283 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.411296 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.411305 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:46Z","lastTransitionTime":"2026-01-22T14:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.513340 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.513408 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.513425 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.513450 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.513465 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:46Z","lastTransitionTime":"2026-01-22T14:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.615528 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.615570 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.615580 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.615594 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.615605 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:46Z","lastTransitionTime":"2026-01-22T14:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.718184 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.718227 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.718237 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.718256 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.718267 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:46Z","lastTransitionTime":"2026-01-22T14:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.820839 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.820880 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.820892 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.820907 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.820918 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:46Z","lastTransitionTime":"2026-01-22T14:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.923226 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.923279 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.923290 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.923306 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:46 crc kubenswrapper[5017]: I0122 14:04:46.923317 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:46Z","lastTransitionTime":"2026-01-22T14:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.025663 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.025721 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.025735 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.025766 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.025791 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:47Z","lastTransitionTime":"2026-01-22T14:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.127544 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.127585 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.127596 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.127613 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.127623 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:47Z","lastTransitionTime":"2026-01-22T14:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.229554 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.229584 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.229597 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.229615 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.229625 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:47Z","lastTransitionTime":"2026-01-22T14:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.257055 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.257101 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:47 crc kubenswrapper[5017]: E0122 14:04:47.257235 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.257280 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:47 crc kubenswrapper[5017]: E0122 14:04:47.257588 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:47 crc kubenswrapper[5017]: E0122 14:04:47.257677 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.258043 5017 scope.go:117] "RemoveContainer" containerID="20495ad9747c4b88b78329f029365528560cc625df54d8291d6873b4afec6222" Jan 22 14:04:47 crc kubenswrapper[5017]: E0122 14:04:47.258250 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-vgtf7_openshift-ovn-kubernetes(43c64840-5206-4dec-834e-77e0562f19de)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" podUID="43c64840-5206-4dec-834e-77e0562f19de" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.277162 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 11:39:28.134689385 +0000 UTC Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.331551 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.331593 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.331604 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.331618 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.331627 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:47Z","lastTransitionTime":"2026-01-22T14:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.435211 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.435244 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.435251 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.435267 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.435275 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:47Z","lastTransitionTime":"2026-01-22T14:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.537513 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.537551 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.537584 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.537621 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.537632 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:47Z","lastTransitionTime":"2026-01-22T14:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.639909 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.639958 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.639968 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.639984 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.639995 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:47Z","lastTransitionTime":"2026-01-22T14:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.742364 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.742418 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.742435 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.742454 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.742469 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:47Z","lastTransitionTime":"2026-01-22T14:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.844737 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.844779 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.844792 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.844809 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.844822 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:47Z","lastTransitionTime":"2026-01-22T14:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.947327 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.947371 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.947381 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.947396 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:47 crc kubenswrapper[5017]: I0122 14:04:47.947406 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:47Z","lastTransitionTime":"2026-01-22T14:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.049771 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.049802 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.049812 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.049827 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.049837 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:48Z","lastTransitionTime":"2026-01-22T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.151932 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.151978 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.151996 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.152014 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.152025 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:48Z","lastTransitionTime":"2026-01-22T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.255344 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.255393 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.255403 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.255422 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.255434 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:48Z","lastTransitionTime":"2026-01-22T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.256767 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:48 crc kubenswrapper[5017]: E0122 14:04:48.256889 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.277679 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 19:00:45.655698525 +0000 UTC Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.358763 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.358799 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.358810 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.358825 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.358837 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:48Z","lastTransitionTime":"2026-01-22T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.461980 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.462019 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.462030 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.462048 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.462060 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:48Z","lastTransitionTime":"2026-01-22T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.565097 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.565166 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.565178 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.565213 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.565231 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:48Z","lastTransitionTime":"2026-01-22T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.668915 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.668958 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.668968 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.668983 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.668993 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:48Z","lastTransitionTime":"2026-01-22T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.771317 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.771386 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.771401 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.771418 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.771430 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:48Z","lastTransitionTime":"2026-01-22T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.873894 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.873942 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.873950 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.873966 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.873981 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:48Z","lastTransitionTime":"2026-01-22T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.976570 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.976610 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.976619 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.976634 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:48 crc kubenswrapper[5017]: I0122 14:04:48.976644 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:48Z","lastTransitionTime":"2026-01-22T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.083462 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.083517 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.083526 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.083542 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.083551 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:49Z","lastTransitionTime":"2026-01-22T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.186243 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.186533 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.186630 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.186750 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.186852 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:49Z","lastTransitionTime":"2026-01-22T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.257434 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.257492 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:49 crc kubenswrapper[5017]: E0122 14:04:49.257560 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.257504 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:49 crc kubenswrapper[5017]: E0122 14:04:49.257698 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:49 crc kubenswrapper[5017]: E0122 14:04:49.257622 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.277981 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 03:23:55.306940528 +0000 UTC Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.289966 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.290018 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.290028 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.290043 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.290052 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:49Z","lastTransitionTime":"2026-01-22T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.391869 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.391915 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.391928 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.391944 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.391955 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:49Z","lastTransitionTime":"2026-01-22T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.493797 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.493847 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.493871 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.493885 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.493893 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:49Z","lastTransitionTime":"2026-01-22T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.596373 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.596681 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.596776 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.596879 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.596965 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:49Z","lastTransitionTime":"2026-01-22T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.699032 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.699373 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.699475 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.699585 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.699683 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:49Z","lastTransitionTime":"2026-01-22T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.802215 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.802254 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.802265 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.802279 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.802288 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:49Z","lastTransitionTime":"2026-01-22T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.904855 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.904887 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.904895 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.904908 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:49 crc kubenswrapper[5017]: I0122 14:04:49.904918 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:49Z","lastTransitionTime":"2026-01-22T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.007213 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.007489 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.007553 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.007631 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.007698 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:50Z","lastTransitionTime":"2026-01-22T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.110566 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.110866 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.110999 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.111165 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.111304 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:50Z","lastTransitionTime":"2026-01-22T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.216042 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.216087 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.216101 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.216141 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.216154 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:50Z","lastTransitionTime":"2026-01-22T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.257109 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:50 crc kubenswrapper[5017]: E0122 14:04:50.257255 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.279019 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 21:08:38.678752259 +0000 UTC Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.318950 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.319236 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.319341 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.319466 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.319560 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:50Z","lastTransitionTime":"2026-01-22T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.421779 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.421822 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.421836 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.421853 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.421870 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:50Z","lastTransitionTime":"2026-01-22T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.524535 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.524847 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.524931 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.525020 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.525108 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:50Z","lastTransitionTime":"2026-01-22T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.627480 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.627721 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.627803 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.627878 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.627969 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:50Z","lastTransitionTime":"2026-01-22T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.731167 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.731233 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.731256 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.731276 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.731291 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:50Z","lastTransitionTime":"2026-01-22T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.778540 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.778580 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.778590 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.778620 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.778633 5017 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:04:50Z","lastTransitionTime":"2026-01-22T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.827240 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-tznbf"] Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.827819 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tznbf" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.830647 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.830686 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.830787 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.833451 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.879482 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=88.879466422 podStartE2EDuration="1m28.879466422s" podCreationTimestamp="2026-01-22 14:03:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:04:50.852542987 +0000 UTC m=+105.845004855" watchObservedRunningTime="2026-01-22 14:04:50.879466422 +0000 UTC m=+105.871928280" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.895954 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=83.895932394 podStartE2EDuration="1m23.895932394s" podCreationTimestamp="2026-01-22 14:03:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:04:50.879643657 +0000 UTC m=+105.872105525" watchObservedRunningTime="2026-01-22 14:04:50.895932394 +0000 UTC m=+105.888394262" Jan 22 
14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.906721 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/7271cae7-2aeb-4120-aec9-cd8e8c2d02a3-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-tznbf\" (UID: \"7271cae7-2aeb-4120-aec9-cd8e8c2d02a3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tznbf" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.906790 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7271cae7-2aeb-4120-aec9-cd8e8c2d02a3-service-ca\") pod \"cluster-version-operator-5c965bbfc6-tznbf\" (UID: \"7271cae7-2aeb-4120-aec9-cd8e8c2d02a3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tznbf" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.906819 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7271cae7-2aeb-4120-aec9-cd8e8c2d02a3-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-tznbf\" (UID: \"7271cae7-2aeb-4120-aec9-cd8e8c2d02a3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tznbf" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.906863 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7271cae7-2aeb-4120-aec9-cd8e8c2d02a3-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-tznbf\" (UID: \"7271cae7-2aeb-4120-aec9-cd8e8c2d02a3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tznbf" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.906884 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/7271cae7-2aeb-4120-aec9-cd8e8c2d02a3-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-tznbf\" (UID: \"7271cae7-2aeb-4120-aec9-cd8e8c2d02a3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tznbf" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.951530 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-d5kwd" podStartSLOduration=85.951505336 podStartE2EDuration="1m25.951505336s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:04:50.928456207 +0000 UTC m=+105.920918065" watchObservedRunningTime="2026-01-22 14:04:50.951505336 +0000 UTC m=+105.943967194" Jan 22 14:04:50 crc kubenswrapper[5017]: I0122 14:04:50.987141 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hvbsd" podStartSLOduration=85.98710352 podStartE2EDuration="1m25.98710352s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:04:50.967072071 +0000 UTC m=+105.959533929" watchObservedRunningTime="2026-01-22 14:04:50.98710352 +0000 UTC m=+105.979565378" Jan 22 14:04:51 crc kubenswrapper[5017]: I0122 14:04:51.006852 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-multus/multus-additional-cni-plugins-pjvjs" podStartSLOduration=86.00683717 podStartE2EDuration="1m26.00683717s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:04:51.006011446 +0000 UTC m=+105.998473304" watchObservedRunningTime="2026-01-22 14:04:51.00683717 +0000 UTC m=+105.999299028" Jan 22 14:04:51 crc kubenswrapper[5017]: I0122 14:04:51.007631 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7271cae7-2aeb-4120-aec9-cd8e8c2d02a3-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-tznbf\" (UID: \"7271cae7-2aeb-4120-aec9-cd8e8c2d02a3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tznbf" Jan 22 14:04:51 crc kubenswrapper[5017]: I0122 14:04:51.007673 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7271cae7-2aeb-4120-aec9-cd8e8c2d02a3-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-tznbf\" (UID: \"7271cae7-2aeb-4120-aec9-cd8e8c2d02a3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tznbf" Jan 22 14:04:51 crc kubenswrapper[5017]: I0122 14:04:51.007710 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/7271cae7-2aeb-4120-aec9-cd8e8c2d02a3-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-tznbf\" (UID: \"7271cae7-2aeb-4120-aec9-cd8e8c2d02a3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tznbf" Jan 22 14:04:51 crc kubenswrapper[5017]: I0122 14:04:51.007753 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/7271cae7-2aeb-4120-aec9-cd8e8c2d02a3-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-tznbf\" (UID: \"7271cae7-2aeb-4120-aec9-cd8e8c2d02a3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tznbf" Jan 22 14:04:51 crc kubenswrapper[5017]: I0122 14:04:51.007811 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7271cae7-2aeb-4120-aec9-cd8e8c2d02a3-service-ca\") pod \"cluster-version-operator-5c965bbfc6-tznbf\" (UID: \"7271cae7-2aeb-4120-aec9-cd8e8c2d02a3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tznbf" Jan 22 14:04:51 crc kubenswrapper[5017]: I0122 14:04:51.007834 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/7271cae7-2aeb-4120-aec9-cd8e8c2d02a3-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-tznbf\" (UID: \"7271cae7-2aeb-4120-aec9-cd8e8c2d02a3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tznbf" Jan 22 14:04:51 crc kubenswrapper[5017]: I0122 14:04:51.007925 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/7271cae7-2aeb-4120-aec9-cd8e8c2d02a3-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-tznbf\" (UID: \"7271cae7-2aeb-4120-aec9-cd8e8c2d02a3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tznbf" Jan 22 14:04:51 crc kubenswrapper[5017]: I0122 14:04:51.009251 5017 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7271cae7-2aeb-4120-aec9-cd8e8c2d02a3-service-ca\") pod \"cluster-version-operator-5c965bbfc6-tznbf\" (UID: \"7271cae7-2aeb-4120-aec9-cd8e8c2d02a3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tznbf" Jan 22 14:04:51 crc kubenswrapper[5017]: I0122 14:04:51.017835 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7271cae7-2aeb-4120-aec9-cd8e8c2d02a3-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-tznbf\" (UID: \"7271cae7-2aeb-4120-aec9-cd8e8c2d02a3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tznbf" Jan 22 14:04:51 crc kubenswrapper[5017]: I0122 14:04:51.022174 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=54.022158849 podStartE2EDuration="54.022158849s" podCreationTimestamp="2026-01-22 14:03:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:04:51.021904621 +0000 UTC m=+106.014366479" watchObservedRunningTime="2026-01-22 14:04:51.022158849 +0000 UTC m=+106.014620707" Jan 22 14:04:51 crc kubenswrapper[5017]: I0122 14:04:51.026134 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7271cae7-2aeb-4120-aec9-cd8e8c2d02a3-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-tznbf\" (UID: \"7271cae7-2aeb-4120-aec9-cd8e8c2d02a3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tznbf" Jan 22 14:04:51 crc kubenswrapper[5017]: I0122 14:04:51.049870 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-j2qmt" podStartSLOduration=86.049851047 podStartE2EDuration="1m26.049851047s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:04:51.049561178 +0000 UTC m=+106.042023036" watchObservedRunningTime="2026-01-22 14:04:51.049851047 +0000 UTC m=+106.042312905" Jan 22 14:04:51 crc kubenswrapper[5017]: I0122 14:04:51.050338 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=15.050332521 podStartE2EDuration="15.050332521s" podCreationTimestamp="2026-01-22 14:04:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:04:51.035200369 +0000 UTC m=+106.027662227" watchObservedRunningTime="2026-01-22 14:04:51.050332521 +0000 UTC m=+106.042794379" Jan 22 14:04:51 crc kubenswrapper[5017]: I0122 14:04:51.061475 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podStartSLOduration=86.061456634 podStartE2EDuration="1m26.061456634s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:04:51.061310519 +0000 UTC m=+106.053772377" watchObservedRunningTime="2026-01-22 14:04:51.061456634 +0000 UTC m=+106.053918492" Jan 22 14:04:51 crc 
kubenswrapper[5017]: I0122 14:04:51.127231 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=85.127184668 podStartE2EDuration="1m25.127184668s" podCreationTimestamp="2026-01-22 14:03:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:04:51.109676255 +0000 UTC m=+106.102138113" watchObservedRunningTime="2026-01-22 14:04:51.127184668 +0000 UTC m=+106.119646526" Jan 22 14:04:51 crc kubenswrapper[5017]: I0122 14:04:51.152835 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tznbf" Jan 22 14:04:51 crc kubenswrapper[5017]: W0122 14:04:51.165237 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7271cae7_2aeb_4120_aec9_cd8e8c2d02a3.slice/crio-5dbd557a141249119372e589365bb410e1c6369229becfb840911b84e9f26a30 WatchSource:0}: Error finding container 5dbd557a141249119372e589365bb410e1c6369229becfb840911b84e9f26a30: Status 404 returned error can't find the container with id 5dbd557a141249119372e589365bb410e1c6369229becfb840911b84e9f26a30 Jan 22 14:04:51 crc kubenswrapper[5017]: I0122 14:04:51.257005 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:51 crc kubenswrapper[5017]: E0122 14:04:51.257255 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:51 crc kubenswrapper[5017]: I0122 14:04:51.257391 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:51 crc kubenswrapper[5017]: E0122 14:04:51.257503 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:51 crc kubenswrapper[5017]: I0122 14:04:51.257614 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:51 crc kubenswrapper[5017]: E0122 14:04:51.257728 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:51 crc kubenswrapper[5017]: I0122 14:04:51.280063 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 15:39:16.888713849 +0000 UTC Jan 22 14:04:51 crc kubenswrapper[5017]: I0122 14:04:51.280174 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Jan 22 14:04:51 crc kubenswrapper[5017]: I0122 14:04:51.287930 5017 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 22 14:04:52 crc kubenswrapper[5017]: I0122 14:04:52.103303 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tznbf" event={"ID":"7271cae7-2aeb-4120-aec9-cd8e8c2d02a3","Type":"ContainerStarted","Data":"40c4368c17bef433cb7102ace4bbca4e55a1b26801ec53122c7f204d70c4c570"} Jan 22 14:04:52 crc kubenswrapper[5017]: I0122 14:04:52.103350 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tznbf" event={"ID":"7271cae7-2aeb-4120-aec9-cd8e8c2d02a3","Type":"ContainerStarted","Data":"5dbd557a141249119372e589365bb410e1c6369229becfb840911b84e9f26a30"} Jan 22 14:04:52 crc kubenswrapper[5017]: I0122 14:04:52.119416 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tznbf" podStartSLOduration=87.119392508 podStartE2EDuration="1m27.119392508s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:04:52.118493261 +0000 UTC m=+107.110955119" watchObservedRunningTime="2026-01-22 14:04:52.119392508 +0000 UTC m=+107.111854366" Jan 22 14:04:52 crc kubenswrapper[5017]: I0122 14:04:52.120473 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-6m84c" podStartSLOduration=87.12046413 podStartE2EDuration="1m27.12046413s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:04:51.175436441 +0000 UTC m=+106.167898299" watchObservedRunningTime="2026-01-22 14:04:52.12046413 +0000 UTC m=+107.112925988" Jan 22 14:04:52 crc kubenswrapper[5017]: I0122 14:04:52.257482 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:52 crc kubenswrapper[5017]: E0122 14:04:52.257609 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:53 crc kubenswrapper[5017]: I0122 14:04:53.257271 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:53 crc kubenswrapper[5017]: I0122 14:04:53.257323 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:53 crc kubenswrapper[5017]: I0122 14:04:53.257279 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:53 crc kubenswrapper[5017]: E0122 14:04:53.257422 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:53 crc kubenswrapper[5017]: E0122 14:04:53.257509 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:53 crc kubenswrapper[5017]: E0122 14:04:53.257575 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:54 crc kubenswrapper[5017]: I0122 14:04:54.256784 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:54 crc kubenswrapper[5017]: E0122 14:04:54.256965 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:55 crc kubenswrapper[5017]: I0122 14:04:55.256696 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:55 crc kubenswrapper[5017]: I0122 14:04:55.256732 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:55 crc kubenswrapper[5017]: E0122 14:04:55.257920 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:55 crc kubenswrapper[5017]: I0122 14:04:55.258004 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:55 crc kubenswrapper[5017]: E0122 14:04:55.258591 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:55 crc kubenswrapper[5017]: E0122 14:04:55.258789 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:56 crc kubenswrapper[5017]: I0122 14:04:56.256914 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:56 crc kubenswrapper[5017]: E0122 14:04:56.257191 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:57 crc kubenswrapper[5017]: I0122 14:04:57.256851 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:57 crc kubenswrapper[5017]: I0122 14:04:57.256897 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:57 crc kubenswrapper[5017]: I0122 14:04:57.256939 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:57 crc kubenswrapper[5017]: E0122 14:04:57.257188 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:57 crc kubenswrapper[5017]: E0122 14:04:57.257321 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:57 crc kubenswrapper[5017]: E0122 14:04:57.257413 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:04:58 crc kubenswrapper[5017]: I0122 14:04:58.436868 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:04:58 crc kubenswrapper[5017]: I0122 14:04:58.436868 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:58 crc kubenswrapper[5017]: E0122 14:04:58.437002 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:04:58 crc kubenswrapper[5017]: E0122 14:04:58.437126 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:58 crc kubenswrapper[5017]: I0122 14:04:58.438741 5017 scope.go:117] "RemoveContainer" containerID="20495ad9747c4b88b78329f029365528560cc625df54d8291d6873b4afec6222" Jan 22 14:04:59 crc kubenswrapper[5017]: I0122 14:04:59.124703 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vgtf7_43c64840-5206-4dec-834e-77e0562f19de/ovnkube-controller/3.log" Jan 22 14:04:59 crc kubenswrapper[5017]: I0122 14:04:59.127287 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerStarted","Data":"1d1097c8995866c82cfe80dc8db5fa80717f4591526ecd539d41108303fdab60"} Jan 22 14:04:59 crc kubenswrapper[5017]: I0122 14:04:59.128247 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:04:59 crc kubenswrapper[5017]: I0122 14:04:59.151260 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-gc29v"] Jan 22 14:04:59 crc kubenswrapper[5017]: I0122 14:04:59.151377 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:04:59 crc kubenswrapper[5017]: E0122 14:04:59.151514 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:04:59 crc kubenswrapper[5017]: I0122 14:04:59.169497 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" podStartSLOduration=94.169476982 podStartE2EDuration="1m34.169476982s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:04:59.16738356 +0000 UTC m=+114.159845438" watchObservedRunningTime="2026-01-22 14:04:59.169476982 +0000 UTC m=+114.161938840" Jan 22 14:04:59 crc kubenswrapper[5017]: I0122 14:04:59.257009 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:04:59 crc kubenswrapper[5017]: E0122 14:04:59.257434 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:04:59 crc kubenswrapper[5017]: I0122 14:04:59.257590 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:04:59 crc kubenswrapper[5017]: E0122 14:04:59.257807 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:05:00 crc kubenswrapper[5017]: I0122 14:05:00.131760 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j2qmt_c223e7fe-8360-4731-a32d-3cf60ed7324b/kube-multus/1.log" Jan 22 14:05:00 crc kubenswrapper[5017]: I0122 14:05:00.132224 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j2qmt_c223e7fe-8360-4731-a32d-3cf60ed7324b/kube-multus/0.log" Jan 22 14:05:00 crc kubenswrapper[5017]: I0122 14:05:00.132265 5017 generic.go:334] "Generic (PLEG): container finished" podID="c223e7fe-8360-4731-a32d-3cf60ed7324b" containerID="5f97833420fc482dc5fa1d748caa12701d129b5081056d0a7f8c40050c64cf2f" exitCode=1 Jan 22 14:05:00 crc kubenswrapper[5017]: I0122 14:05:00.132754 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j2qmt" event={"ID":"c223e7fe-8360-4731-a32d-3cf60ed7324b","Type":"ContainerDied","Data":"5f97833420fc482dc5fa1d748caa12701d129b5081056d0a7f8c40050c64cf2f"} Jan 22 14:05:00 crc kubenswrapper[5017]: I0122 14:05:00.132829 5017 scope.go:117] "RemoveContainer" containerID="ee76d6a25cdedd49a95d58fd493bd18f13c44383fa510662c174ea1f510a79bd" Jan 22 14:05:00 crc kubenswrapper[5017]: I0122 14:05:00.133460 5017 scope.go:117] "RemoveContainer" containerID="5f97833420fc482dc5fa1d748caa12701d129b5081056d0a7f8c40050c64cf2f" Jan 22 14:05:00 crc kubenswrapper[5017]: E0122 14:05:00.133739 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-j2qmt_openshift-multus(c223e7fe-8360-4731-a32d-3cf60ed7324b)\"" pod="openshift-multus/multus-j2qmt" podUID="c223e7fe-8360-4731-a32d-3cf60ed7324b" Jan 22 14:05:00 crc kubenswrapper[5017]: I0122 14:05:00.257148 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:05:00 crc kubenswrapper[5017]: E0122 14:05:00.257286 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:05:01 crc kubenswrapper[5017]: I0122 14:05:01.136291 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j2qmt_c223e7fe-8360-4731-a32d-3cf60ed7324b/kube-multus/1.log" Jan 22 14:05:01 crc kubenswrapper[5017]: I0122 14:05:01.257023 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:05:01 crc kubenswrapper[5017]: E0122 14:05:01.257181 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:05:01 crc kubenswrapper[5017]: I0122 14:05:01.257369 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:05:01 crc kubenswrapper[5017]: E0122 14:05:01.257522 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:05:01 crc kubenswrapper[5017]: I0122 14:05:01.257670 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:05:01 crc kubenswrapper[5017]: E0122 14:05:01.257732 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:05:02 crc kubenswrapper[5017]: I0122 14:05:02.257382 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:05:02 crc kubenswrapper[5017]: E0122 14:05:02.257533 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:05:03 crc kubenswrapper[5017]: I0122 14:05:03.257098 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:05:03 crc kubenswrapper[5017]: I0122 14:05:03.257106 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:05:03 crc kubenswrapper[5017]: I0122 14:05:03.257281 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:05:03 crc kubenswrapper[5017]: E0122 14:05:03.257398 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:05:03 crc kubenswrapper[5017]: E0122 14:05:03.257546 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:05:03 crc kubenswrapper[5017]: E0122 14:05:03.257632 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:05:04 crc kubenswrapper[5017]: I0122 14:05:04.257265 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:05:04 crc kubenswrapper[5017]: E0122 14:05:04.257480 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:05:05 crc kubenswrapper[5017]: E0122 14:05:05.213606 5017 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 22 14:05:05 crc kubenswrapper[5017]: I0122 14:05:05.256744 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:05:05 crc kubenswrapper[5017]: I0122 14:05:05.256826 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:05:05 crc kubenswrapper[5017]: I0122 14:05:05.256842 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:05:05 crc kubenswrapper[5017]: E0122 14:05:05.259002 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:05:05 crc kubenswrapper[5017]: E0122 14:05:05.259379 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:05:05 crc kubenswrapper[5017]: E0122 14:05:05.259639 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:05:05 crc kubenswrapper[5017]: E0122 14:05:05.352123 5017 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 22 14:05:06 crc kubenswrapper[5017]: I0122 14:05:06.257755 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:05:06 crc kubenswrapper[5017]: E0122 14:05:06.257939 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:05:07 crc kubenswrapper[5017]: I0122 14:05:07.256926 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:05:07 crc kubenswrapper[5017]: I0122 14:05:07.256926 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:05:07 crc kubenswrapper[5017]: I0122 14:05:07.256946 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:05:07 crc kubenswrapper[5017]: E0122 14:05:07.257063 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:05:07 crc kubenswrapper[5017]: E0122 14:05:07.257285 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:05:07 crc kubenswrapper[5017]: E0122 14:05:07.257362 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:05:08 crc kubenswrapper[5017]: I0122 14:05:08.258312 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:05:08 crc kubenswrapper[5017]: E0122 14:05:08.258466 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:05:09 crc kubenswrapper[5017]: I0122 14:05:09.256834 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:05:09 crc kubenswrapper[5017]: I0122 14:05:09.256911 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:05:09 crc kubenswrapper[5017]: I0122 14:05:09.256927 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:05:09 crc kubenswrapper[5017]: E0122 14:05:09.257040 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:05:09 crc kubenswrapper[5017]: E0122 14:05:09.257235 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:05:09 crc kubenswrapper[5017]: E0122 14:05:09.257386 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:05:10 crc kubenswrapper[5017]: I0122 14:05:10.257514 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:05:10 crc kubenswrapper[5017]: E0122 14:05:10.257683 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:05:10 crc kubenswrapper[5017]: E0122 14:05:10.353744 5017 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" Jan 22 14:05:11 crc kubenswrapper[5017]: I0122 14:05:11.257273 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:05:11 crc kubenswrapper[5017]: E0122 14:05:11.257383 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:05:11 crc kubenswrapper[5017]: I0122 14:05:11.257539 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:05:11 crc kubenswrapper[5017]: E0122 14:05:11.257579 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:05:11 crc kubenswrapper[5017]: I0122 14:05:11.258266 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:05:11 crc kubenswrapper[5017]: E0122 14:05:11.258341 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:05:12 crc kubenswrapper[5017]: I0122 14:05:12.257450 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:05:12 crc kubenswrapper[5017]: E0122 14:05:12.257823 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:05:12 crc kubenswrapper[5017]: I0122 14:05:12.257877 5017 scope.go:117] "RemoveContainer" containerID="5f97833420fc482dc5fa1d748caa12701d129b5081056d0a7f8c40050c64cf2f" Jan 22 14:05:13 crc kubenswrapper[5017]: I0122 14:05:13.176146 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j2qmt_c223e7fe-8360-4731-a32d-3cf60ed7324b/kube-multus/1.log" Jan 22 14:05:13 crc kubenswrapper[5017]: I0122 14:05:13.176459 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j2qmt" event={"ID":"c223e7fe-8360-4731-a32d-3cf60ed7324b","Type":"ContainerStarted","Data":"d04e65bff2218b2db1e8def603023e29a162b0a2a77092623bd04d9e54430afd"} Jan 22 14:05:13 crc kubenswrapper[5017]: I0122 14:05:13.256986 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:05:13 crc kubenswrapper[5017]: I0122 14:05:13.257023 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:05:13 crc kubenswrapper[5017]: I0122 14:05:13.257099 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:05:13 crc kubenswrapper[5017]: E0122 14:05:13.257203 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:05:13 crc kubenswrapper[5017]: E0122 14:05:13.257275 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:05:13 crc kubenswrapper[5017]: E0122 14:05:13.257406 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:05:14 crc kubenswrapper[5017]: I0122 14:05:14.257585 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:05:14 crc kubenswrapper[5017]: E0122 14:05:14.257747 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:05:15 crc kubenswrapper[5017]: I0122 14:05:15.257370 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:05:15 crc kubenswrapper[5017]: I0122 14:05:15.257410 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:05:15 crc kubenswrapper[5017]: E0122 14:05:15.258231 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gc29v" podUID="bca74e01-1d78-4eeb-b3db-842fda0babd7" Jan 22 14:05:15 crc kubenswrapper[5017]: I0122 14:05:15.258262 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:05:15 crc kubenswrapper[5017]: E0122 14:05:15.258368 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:05:15 crc kubenswrapper[5017]: E0122 14:05:15.258528 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:05:16 crc kubenswrapper[5017]: I0122 14:05:16.257521 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:05:16 crc kubenswrapper[5017]: I0122 14:05:16.259721 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 22 14:05:16 crc kubenswrapper[5017]: I0122 14:05:16.259822 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 22 14:05:17 crc kubenswrapper[5017]: I0122 14:05:17.257252 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:05:17 crc kubenswrapper[5017]: I0122 14:05:17.257321 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:05:17 crc kubenswrapper[5017]: I0122 14:05:17.257333 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:05:17 crc kubenswrapper[5017]: I0122 14:05:17.260621 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 22 14:05:17 crc kubenswrapper[5017]: I0122 14:05:17.260702 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 22 14:05:17 crc kubenswrapper[5017]: I0122 14:05:17.260835 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 22 14:05:17 crc kubenswrapper[5017]: I0122 14:05:17.262217 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 22 14:05:19 crc kubenswrapper[5017]: I0122 14:05:19.116737 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:05:20 crc kubenswrapper[5017]: I0122 14:05:20.984586 5017 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.021251 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gbbvd"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.021669 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.025244 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-6vm24"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.025737 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6vm24" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.026620 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.029855 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.038889 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.038989 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.039175 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.039200 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.039458 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.039642 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.040001 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.040348 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.040569 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.041496 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.046631 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-l57n2"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.047428 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-l57n2" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.047556 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.048429 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.048489 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-t47kf"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.049100 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-t47kf" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.050162 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.050458 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.051033 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qtbg9"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.051816 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.057693 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.057693 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.059295 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-66rfh"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.060420 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-66rfh" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.065579 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.066017 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.066041 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.066218 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.066472 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.066617 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.066623 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.066603 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.066887 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.067018 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 22 
14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.067096 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.067211 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.067291 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.067375 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.067213 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.067421 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.067377 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.067541 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.067579 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.067613 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.068802 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.068900 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.068988 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.069051 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.069139 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.069215 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.069260 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.069318 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.069390 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.069449 5017 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-oauth-apiserver"/"audit-1" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.069484 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.069485 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.069525 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.069496 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.069593 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.069658 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.069713 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.069753 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.069718 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.072980 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.073445 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7glj"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.074059 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7glj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.082390 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k82zx"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.082814 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-p4xvc"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.083146 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-p4xvc" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.083364 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-sk2hg"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.083624 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k82zx" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.084046 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-sk2hg" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.084538 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.084760 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.085153 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.085628 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.085861 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.088404 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-j4qnq"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.098259 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.100300 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wbqbm"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.102597 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wbqbm" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.107286 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.107811 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.108045 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.109203 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.109538 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.110883 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.117332 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.117351 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-r4qhd"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.144450 5017 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-console"/"default-dockercfg-chnjx" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.147002 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.147676 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a853328-872d-4a4e-98d1-681be7eb3603-serving-cert\") pod \"route-controller-manager-6576b87f9c-rtv74\" (UID: \"1a853328-872d-4a4e-98d1-681be7eb3603\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.148037 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.148425 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.148483 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.148692 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/382be394-5527-4c07-9600-f70cb1c1ac18-serving-cert\") pod \"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.148780 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/382be394-5527-4c07-9600-f70cb1c1ac18-audit-policies\") pod \"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.148808 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a853328-872d-4a4e-98d1-681be7eb3603-config\") pod \"route-controller-manager-6576b87f9c-rtv74\" (UID: \"1a853328-872d-4a4e-98d1-681be7eb3603\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.148962 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/58666604-0909-4502-a493-94f59c37556b-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-66rfh\" (UID: \"58666604-0909-4502-a493-94f59c37556b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-66rfh" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.148988 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/382be394-5527-4c07-9600-f70cb1c1ac18-encryption-config\") pod \"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.149059 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-wnrb9\" (UniqueName: \"kubernetes.io/projected/58666604-0909-4502-a493-94f59c37556b-kube-api-access-wnrb9\") pod \"cluster-samples-operator-665b6dd947-66rfh\" (UID: \"58666604-0909-4502-a493-94f59c37556b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-66rfh" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.149215 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwddp\" (UniqueName: \"kubernetes.io/projected/382be394-5527-4c07-9600-f70cb1c1ac18-kube-api-access-wwddp\") pod \"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.149278 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1a853328-872d-4a4e-98d1-681be7eb3603-client-ca\") pod \"route-controller-manager-6576b87f9c-rtv74\" (UID: \"1a853328-872d-4a4e-98d1-681be7eb3603\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.149360 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/382be394-5527-4c07-9600-f70cb1c1ac18-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.149405 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/382be394-5527-4c07-9600-f70cb1c1ac18-audit-dir\") pod \"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.149435 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/382be394-5527-4c07-9600-f70cb1c1ac18-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.149468 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/382be394-5527-4c07-9600-f70cb1c1ac18-etcd-client\") pod \"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.149509 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j48bd\" (UniqueName: \"kubernetes.io/projected/1a853328-872d-4a4e-98d1-681be7eb3603-kube-api-access-j48bd\") pod \"route-controller-manager-6576b87f9c-rtv74\" (UID: \"1a853328-872d-4a4e-98d1-681be7eb3603\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.149695 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" 
Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.150720 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-r4qhd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.151075 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.152071 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.153107 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-l5x2x"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.153663 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7pt94"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.153897 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xtlnn"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.154241 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xtlnn" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.154944 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-l5x2x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.155267 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.158309 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.158434 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.158586 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.158750 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.158887 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.158894 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.159521 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.159659 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.159724 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.160187 5017 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-image-registry"/"image-registry-operator-tls" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.160322 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.160626 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.160911 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.160934 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.160922 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.161144 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.161255 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.161466 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-tm429"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.162987 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-tm429" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.163641 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tn54n"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.164150 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.165088 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-g77fg"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.165830 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-g77fg" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.172696 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.173287 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.173367 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.173423 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.173510 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.173549 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.174639 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.175892 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.178726 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-dpdfj"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.180148 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-2lp2r"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.180966 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2lp2r" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.181270 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-dpdfj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.182681 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.190685 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.191547 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-52hrz"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.194678 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gbbvd"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.194796 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-52hrz" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.195365 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.197464 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-6d76w"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.198937 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6d76w" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.200332 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.204688 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nlphk"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.207060 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.210385 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.210629 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nlphk" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.211069 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7h4hj"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.211935 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.212225 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.212362 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ctq6h"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.213458 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5w4zs"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.214298 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5w4zs" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.214406 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-8ljnf"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.214485 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ctq6h" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.214879 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-8ljnf" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.216089 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-hq6wt"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.216612 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-hq6wt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.218511 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xrrlz"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.219396 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xrrlz" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.219522 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-l5nvn"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.220668 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-v77t5"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.220787 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5nvn" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.221347 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-htpml"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.221659 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-v77t5" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.222278 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-htpml" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.222945 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gkh55"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.223338 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.223410 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gkh55" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.223499 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhnxk"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.224505 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhnxk" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.224593 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-l57n2"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.226125 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484840-hsdps"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.226785 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-t47kf"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.226951 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-hsdps" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.227536 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-ssjmv"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.228095 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-ssjmv" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.230564 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.233017 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7glj"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.235743 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wbqbm"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.236919 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-j4qnq"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.240189 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-g77fg"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.240978 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-hq6wt"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.244237 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7pt94"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.245375 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-p4xvc"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.247375 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.249989 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-6d76w"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.250649 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwddp\" (UniqueName: \"kubernetes.io/projected/382be394-5527-4c07-9600-f70cb1c1ac18-kube-api-access-wwddp\") pod \"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 
14:05:21.250683 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/8a856b4a-e13f-4691-8019-ff1939ba7726-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-wbqbm\" (UID: \"8a856b4a-e13f-4691-8019-ff1939ba7726\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wbqbm" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.250705 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-client-ca\") pod \"controller-manager-879f6c89f-gbbvd\" (UID: \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.250723 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1-trusted-ca\") pod \"console-operator-58897d9998-p4xvc\" (UID: \"2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1\") " pod="openshift-console-operator/console-operator-58897d9998-p4xvc" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.250749 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ps8pj\" (UniqueName: \"kubernetes.io/projected/2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1-kube-api-access-ps8pj\") pod \"console-operator-58897d9998-p4xvc\" (UID: \"2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1\") " pod="openshift-console-operator/console-operator-58897d9998-p4xvc" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.250768 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-oauth-serving-cert\") pod \"console-f9d7485db-j4qnq\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.250785 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1a853328-872d-4a4e-98d1-681be7eb3603-client-ca\") pod \"route-controller-manager-6576b87f9c-rtv74\" (UID: \"1a853328-872d-4a4e-98d1-681be7eb3603\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.250805 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s49bd\" (UniqueName: \"kubernetes.io/projected/9ebc6dfd-7e32-4514-84be-0c290fc2c616-kube-api-access-s49bd\") pod \"openshift-controller-manager-operator-756b6f6bc6-k82zx\" (UID: \"9ebc6dfd-7e32-4514-84be-0c290fc2c616\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k82zx" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.250823 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-image-import-ca\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 
14:05:21.250845 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-console-config\") pod \"console-f9d7485db-j4qnq\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.250870 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vd5j\" (UniqueName: \"kubernetes.io/projected/8a856b4a-e13f-4691-8019-ff1939ba7726-kube-api-access-9vd5j\") pod \"cluster-image-registry-operator-dc59b4c8b-wbqbm\" (UID: \"8a856b4a-e13f-4691-8019-ff1939ba7726\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wbqbm" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.250897 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ee65bdd-e09e-4bf1-8025-ec86426073d1-config\") pod \"machine-api-operator-5694c8668f-t47kf\" (UID: \"1ee65bdd-e09e-4bf1-8025-ec86426073d1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-t47kf" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.250915 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8a856b4a-e13f-4691-8019-ff1939ba7726-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-wbqbm\" (UID: \"8a856b4a-e13f-4691-8019-ff1939ba7726\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wbqbm" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.250935 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/66a013c9-01a4-4027-99ec-185312c81a41-console-serving-cert\") pod \"console-f9d7485db-j4qnq\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.250959 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-etcd-serving-ca\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251015 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-node-pullsecrets\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251037 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8a856b4a-e13f-4691-8019-ff1939ba7726-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-wbqbm\" (UID: \"8a856b4a-e13f-4691-8019-ff1939ba7726\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wbqbm" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251056 5017 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/382be394-5527-4c07-9600-f70cb1c1ac18-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251084 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-config\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251101 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-serving-cert\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251138 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-encryption-config\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251155 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4qst\" (UniqueName: \"kubernetes.io/projected/1ee65bdd-e09e-4bf1-8025-ec86426073d1-kube-api-access-f4qst\") pod \"machine-api-operator-5694c8668f-t47kf\" (UID: \"1ee65bdd-e09e-4bf1-8025-ec86426073d1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-t47kf" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251172 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/0bc6e378-496f-4a54-946a-99518f0d3570-machine-approver-tls\") pod \"machine-approver-56656f9798-6vm24\" (UID: \"0bc6e378-496f-4a54-946a-99518f0d3570\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6vm24" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251198 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-gbbvd\" (UID: \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251214 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca0f873e-31ca-46ef-b2cf-c0e0a1ce8a3d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-h7glj\" (UID: \"ca0f873e-31ca-46ef-b2cf-c0e0a1ce8a3d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7glj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251230 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/382be394-5527-4c07-9600-f70cb1c1ac18-audit-dir\") pod 
\"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251246 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/910fc4c2-7723-4380-89d8-9988bb9e53c8-service-ca-bundle\") pod \"authentication-operator-69f744f599-l57n2\" (UID: \"910fc4c2-7723-4380-89d8-9988bb9e53c8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l57n2" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251268 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/910fc4c2-7723-4380-89d8-9988bb9e53c8-serving-cert\") pod \"authentication-operator-69f744f599-l57n2\" (UID: \"910fc4c2-7723-4380-89d8-9988bb9e53c8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l57n2" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251286 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hn7dp\" (UniqueName: \"kubernetes.io/projected/0e0b63e6-f2e1-4ed1-8501-3d0420964499-kube-api-access-hn7dp\") pod \"downloads-7954f5f757-sk2hg\" (UID: \"0e0b63e6-f2e1-4ed1-8501-3d0420964499\") " pod="openshift-console/downloads-7954f5f757-sk2hg" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251302 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/382be394-5527-4c07-9600-f70cb1c1ac18-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251320 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97bz8\" (UniqueName: \"kubernetes.io/projected/910fc4c2-7723-4380-89d8-9988bb9e53c8-kube-api-access-97bz8\") pod \"authentication-operator-69f744f599-l57n2\" (UID: \"910fc4c2-7723-4380-89d8-9988bb9e53c8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l57n2" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251338 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ebc6dfd-7e32-4514-84be-0c290fc2c616-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-k82zx\" (UID: \"9ebc6dfd-7e32-4514-84be-0c290fc2c616\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k82zx" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251356 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1-serving-cert\") pod \"console-operator-58897d9998-p4xvc\" (UID: \"2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1\") " pod="openshift-console-operator/console-operator-58897d9998-p4xvc" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251373 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-serving-cert\") pod 
\"controller-manager-879f6c89f-gbbvd\" (UID: \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251388 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/382be394-5527-4c07-9600-f70cb1c1ac18-etcd-client\") pod \"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251403 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/910fc4c2-7723-4380-89d8-9988bb9e53c8-config\") pod \"authentication-operator-69f744f599-l57n2\" (UID: \"910fc4c2-7723-4380-89d8-9988bb9e53c8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l57n2" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251420 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-audit\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251435 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-etcd-client\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251455 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/910fc4c2-7723-4380-89d8-9988bb9e53c8-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-l57n2\" (UID: \"910fc4c2-7723-4380-89d8-9988bb9e53c8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l57n2" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251473 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j48bd\" (UniqueName: \"kubernetes.io/projected/1a853328-872d-4a4e-98d1-681be7eb3603-kube-api-access-j48bd\") pod \"route-controller-manager-6576b87f9c-rtv74\" (UID: \"1a853328-872d-4a4e-98d1-681be7eb3603\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251488 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-service-ca\") pod \"console-f9d7485db-j4qnq\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251508 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-config\") pod \"controller-manager-879f6c89f-gbbvd\" (UID: \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" Jan 22 14:05:21 crc 
kubenswrapper[5017]: I0122 14:05:21.251525 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a853328-872d-4a4e-98d1-681be7eb3603-serving-cert\") pod \"route-controller-manager-6576b87f9c-rtv74\" (UID: \"1a853328-872d-4a4e-98d1-681be7eb3603\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251541 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9ebc6dfd-7e32-4514-84be-0c290fc2c616-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-k82zx\" (UID: \"9ebc6dfd-7e32-4514-84be-0c290fc2c616\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k82zx" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251556 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-audit-dir\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251572 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1-config\") pod \"console-operator-58897d9998-p4xvc\" (UID: \"2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1\") " pod="openshift-console-operator/console-operator-58897d9998-p4xvc" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251591 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca0f873e-31ca-46ef-b2cf-c0e0a1ce8a3d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-h7glj\" (UID: \"ca0f873e-31ca-46ef-b2cf-c0e0a1ce8a3d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7glj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251606 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5c6qn\" (UniqueName: \"kubernetes.io/projected/ca0f873e-31ca-46ef-b2cf-c0e0a1ce8a3d-kube-api-access-5c6qn\") pod \"openshift-apiserver-operator-796bbdcf4f-h7glj\" (UID: \"ca0f873e-31ca-46ef-b2cf-c0e0a1ce8a3d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7glj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251629 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1ee65bdd-e09e-4bf1-8025-ec86426073d1-images\") pod \"machine-api-operator-5694c8668f-t47kf\" (UID: \"1ee65bdd-e09e-4bf1-8025-ec86426073d1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-t47kf" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251644 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/382be394-5527-4c07-9600-f70cb1c1ac18-serving-cert\") pod \"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251661 5017 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkn4n\" (UniqueName: \"kubernetes.io/projected/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-kube-api-access-lkn4n\") pod \"controller-manager-879f6c89f-gbbvd\" (UID: \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251679 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0bc6e378-496f-4a54-946a-99518f0d3570-auth-proxy-config\") pod \"machine-approver-56656f9798-6vm24\" (UID: \"0bc6e378-496f-4a54-946a-99518f0d3570\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6vm24" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251694 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bc6e378-496f-4a54-946a-99518f0d3570-config\") pod \"machine-approver-56656f9798-6vm24\" (UID: \"0bc6e378-496f-4a54-946a-99518f0d3570\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6vm24" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251720 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/382be394-5527-4c07-9600-f70cb1c1ac18-audit-policies\") pod \"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251737 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a853328-872d-4a4e-98d1-681be7eb3603-config\") pod \"route-controller-manager-6576b87f9c-rtv74\" (UID: \"1a853328-872d-4a4e-98d1-681be7eb3603\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251752 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-trusted-ca-bundle\") pod \"console-f9d7485db-j4qnq\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251797 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/58666604-0909-4502-a493-94f59c37556b-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-66rfh\" (UID: \"58666604-0909-4502-a493-94f59c37556b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-66rfh" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251818 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnrb9\" (UniqueName: \"kubernetes.io/projected/58666604-0909-4502-a493-94f59c37556b-kube-api-access-wnrb9\") pod \"cluster-samples-operator-665b6dd947-66rfh\" (UID: \"58666604-0909-4502-a493-94f59c37556b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-66rfh" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251836 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"encryption-config\" (UniqueName: \"kubernetes.io/secret/382be394-5527-4c07-9600-f70cb1c1ac18-encryption-config\") pod \"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251858 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hcrb\" (UniqueName: \"kubernetes.io/projected/0bc6e378-496f-4a54-946a-99518f0d3570-kube-api-access-4hcrb\") pod \"machine-approver-56656f9798-6vm24\" (UID: \"0bc6e378-496f-4a54-946a-99518f0d3570\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6vm24" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251878 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251899 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrw4v\" (UniqueName: \"kubernetes.io/projected/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-kube-api-access-jrw4v\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251920 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7lqv\" (UniqueName: \"kubernetes.io/projected/66a013c9-01a4-4027-99ec-185312c81a41-kube-api-access-s7lqv\") pod \"console-f9d7485db-j4qnq\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251940 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/1ee65bdd-e09e-4bf1-8025-ec86426073d1-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-t47kf\" (UID: \"1ee65bdd-e09e-4bf1-8025-ec86426073d1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-t47kf" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.251960 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/66a013c9-01a4-4027-99ec-185312c81a41-console-oauth-config\") pod \"console-f9d7485db-j4qnq\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.253173 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k82zx"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.253196 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-66rfh"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.253831 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.255828 5017 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1a853328-872d-4a4e-98d1-681be7eb3603-client-ca\") pod \"route-controller-manager-6576b87f9c-rtv74\" (UID: \"1a853328-872d-4a4e-98d1-681be7eb3603\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.256263 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/382be394-5527-4c07-9600-f70cb1c1ac18-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.256300 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/382be394-5527-4c07-9600-f70cb1c1ac18-audit-dir\") pod \"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.256743 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/382be394-5527-4c07-9600-f70cb1c1ac18-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.258803 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-r4qhd"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.260168 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/382be394-5527-4c07-9600-f70cb1c1ac18-audit-policies\") pod \"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.261640 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a853328-872d-4a4e-98d1-681be7eb3603-config\") pod \"route-controller-manager-6576b87f9c-rtv74\" (UID: \"1a853328-872d-4a4e-98d1-681be7eb3603\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.264337 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.271848 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/382be394-5527-4c07-9600-f70cb1c1ac18-serving-cert\") pod \"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.272949 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/382be394-5527-4c07-9600-f70cb1c1ac18-etcd-client\") pod \"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.282394 
5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/382be394-5527-4c07-9600-f70cb1c1ac18-encryption-config\") pod \"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.283045 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a853328-872d-4a4e-98d1-681be7eb3603-serving-cert\") pod \"route-controller-manager-6576b87f9c-rtv74\" (UID: \"1a853328-872d-4a4e-98d1-681be7eb3603\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.283612 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/58666604-0909-4502-a493-94f59c37556b-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-66rfh\" (UID: \"58666604-0909-4502-a493-94f59c37556b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-66rfh" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.284775 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.290688 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7h4hj"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.290729 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5w4zs"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.290741 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-2lp2r"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.290767 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-52hrz"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.290780 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xtlnn"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.290791 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qtbg9"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.290801 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-dpdfj"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.290812 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xrrlz"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.292601 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nlphk"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.294081 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-htpml"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.295191 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-pdfrn"] Jan 22 14:05:21 crc kubenswrapper[5017]: 
I0122 14:05:21.297052 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ctq6h"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.297187 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-pdfrn" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.297911 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-4j8zp"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.299295 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.299525 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tn54n"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.300753 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gkh55"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.301916 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-sk2hg"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.303256 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.303395 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-l5nvn"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.304578 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-8ljnf"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.305859 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-l5x2x"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.307186 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-v77t5"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.308444 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhnxk"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.309634 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484840-hsdps"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.311174 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-4j8zp"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.312533 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-ssjmv"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.313648 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-r84vw"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.314719 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-r84vw"] Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.314907 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-r84vw" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.324693 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.344641 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.352875 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/e9e7d8d3-3f4f-4573-83e3-d2999cc53aa5-signing-key\") pod \"service-ca-9c57cc56f-8ljnf\" (UID: \"e9e7d8d3-3f4f-4573-83e3-d2999cc53aa5\") " pod="openshift-service-ca/service-ca-9c57cc56f-8ljnf" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.352922 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rk84t\" (UniqueName: \"kubernetes.io/projected/e9e7d8d3-3f4f-4573-83e3-d2999cc53aa5-kube-api-access-rk84t\") pod \"service-ca-9c57cc56f-8ljnf\" (UID: \"e9e7d8d3-3f4f-4573-83e3-d2999cc53aa5\") " pod="openshift-service-ca/service-ca-9c57cc56f-8ljnf" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.352945 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqzng\" (UniqueName: \"kubernetes.io/projected/b792244f-2eec-407d-a684-adad1b2ec199-kube-api-access-qqzng\") pod \"migrator-59844c95c7-2lp2r\" (UID: \"b792244f-2eec-407d-a684-adad1b2ec199\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2lp2r" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.352965 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/1ee65bdd-e09e-4bf1-8025-ec86426073d1-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-t47kf\" (UID: \"1ee65bdd-e09e-4bf1-8025-ec86426073d1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-t47kf" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.353001 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hcrb\" (UniqueName: \"kubernetes.io/projected/0bc6e378-496f-4a54-946a-99518f0d3570-kube-api-access-4hcrb\") pod \"machine-approver-56656f9798-6vm24\" (UID: \"0bc6e378-496f-4a54-946a-99518f0d3570\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6vm24" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.353020 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.353036 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrw4v\" (UniqueName: \"kubernetes.io/projected/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-kube-api-access-jrw4v\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.353132 5017 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-s7lqv\" (UniqueName: \"kubernetes.io/projected/66a013c9-01a4-4027-99ec-185312c81a41-kube-api-access-s7lqv\") pod \"console-f9d7485db-j4qnq\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.353187 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/239e0484-3bc8-4628-ba87-d550ce2abb13-etcd-ca\") pod \"etcd-operator-b45778765-dpdfj\" (UID: \"239e0484-3bc8-4628-ba87-d550ce2abb13\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dpdfj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.353254 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/66a013c9-01a4-4027-99ec-185312c81a41-console-oauth-config\") pod \"console-f9d7485db-j4qnq\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.353320 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5drgq\" (UniqueName: \"kubernetes.io/projected/90b0ba9d-f818-4059-a895-fe9947511987-kube-api-access-5drgq\") pod \"control-plane-machine-set-operator-78cbb6b69f-5w4zs\" (UID: \"90b0ba9d-f818-4059-a895-fe9947511987\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5w4zs" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.353358 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7-serving-cert\") pod \"service-ca-operator-777779d784-v77t5\" (UID: \"d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-v77t5" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.353508 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/73bf70af-455b-4c32-8dda-324c3aa3d7b7-proxy-tls\") pod \"machine-config-operator-74547568cd-52hrz\" (UID: \"73bf70af-455b-4c32-8dda-324c3aa3d7b7\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-52hrz" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.353605 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/8a856b4a-e13f-4691-8019-ff1939ba7726-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-wbqbm\" (UID: \"8a856b4a-e13f-4691-8019-ff1939ba7726\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wbqbm" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.353670 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-client-ca\") pod \"controller-manager-879f6c89f-gbbvd\" (UID: \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.353707 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1-trusted-ca\") pod \"console-operator-58897d9998-p4xvc\" (UID: \"2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1\") " pod="openshift-console-operator/console-operator-58897d9998-p4xvc" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.353786 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-oauth-serving-cert\") pod \"console-f9d7485db-j4qnq\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.353837 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lf6z8\" (UniqueName: \"kubernetes.io/projected/d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7-kube-api-access-lf6z8\") pod \"service-ca-operator-777779d784-v77t5\" (UID: \"d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-v77t5" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.353868 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ps8pj\" (UniqueName: \"kubernetes.io/projected/2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1-kube-api-access-ps8pj\") pod \"console-operator-58897d9998-p4xvc\" (UID: \"2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1\") " pod="openshift-console-operator/console-operator-58897d9998-p4xvc" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.353945 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s49bd\" (UniqueName: \"kubernetes.io/projected/9ebc6dfd-7e32-4514-84be-0c290fc2c616-kube-api-access-s49bd\") pod \"openshift-controller-manager-operator-756b6f6bc6-k82zx\" (UID: \"9ebc6dfd-7e32-4514-84be-0c290fc2c616\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k82zx" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.353970 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-image-import-ca\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354018 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-console-config\") pod \"console-f9d7485db-j4qnq\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354049 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/44587741-560f-4a80-8497-4034c8f5c80a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-xtlnn\" (UID: \"44587741-560f-4a80-8497-4034c8f5c80a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xtlnn" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354094 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/239e0484-3bc8-4628-ba87-d550ce2abb13-serving-cert\") pod \"etcd-operator-b45778765-dpdfj\" (UID: 
\"239e0484-3bc8-4628-ba87-d550ce2abb13\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dpdfj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354153 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/331d69d8-6f0e-418f-baef-1162a103675e-serving-cert\") pod \"openshift-config-operator-7777fb866f-r4qhd\" (UID: \"331d69d8-6f0e-418f-baef-1162a103675e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-r4qhd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354188 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vd5j\" (UniqueName: \"kubernetes.io/projected/8a856b4a-e13f-4691-8019-ff1939ba7726-kube-api-access-9vd5j\") pod \"cluster-image-registry-operator-dc59b4c8b-wbqbm\" (UID: \"8a856b4a-e13f-4691-8019-ff1939ba7726\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wbqbm" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354234 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff503dda-7e52-4cb1-920f-bea378ae7a10-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-g77fg\" (UID: \"ff503dda-7e52-4cb1-920f-bea378ae7a10\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-g77fg" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354256 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/745626f0-e2c2-4916-b91b-41c204d3a825-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gkh55\" (UID: \"745626f0-e2c2-4916-b91b-41c204d3a825\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gkh55" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354275 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/44587741-560f-4a80-8497-4034c8f5c80a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-xtlnn\" (UID: \"44587741-560f-4a80-8497-4034c8f5c80a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xtlnn" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354323 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ee65bdd-e09e-4bf1-8025-ec86426073d1-config\") pod \"machine-api-operator-5694c8668f-t47kf\" (UID: \"1ee65bdd-e09e-4bf1-8025-ec86426073d1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-t47kf" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354343 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8a856b4a-e13f-4691-8019-ff1939ba7726-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-wbqbm\" (UID: \"8a856b4a-e13f-4691-8019-ff1939ba7726\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wbqbm" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354366 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/66a013c9-01a4-4027-99ec-185312c81a41-console-serving-cert\") pod \"console-f9d7485db-j4qnq\" (UID: 
\"66a013c9-01a4-4027-99ec-185312c81a41\") " pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354387 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9n22\" (UniqueName: \"kubernetes.io/projected/99574a1d-fcb0-4fac-a62c-1f58e9cc1a1f-kube-api-access-w9n22\") pod \"multus-admission-controller-857f4d67dd-hq6wt\" (UID: \"99574a1d-fcb0-4fac-a62c-1f58e9cc1a1f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hq6wt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354410 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44587741-560f-4a80-8497-4034c8f5c80a-config\") pod \"kube-apiserver-operator-766d6c64bb-xtlnn\" (UID: \"44587741-560f-4a80-8497-4034c8f5c80a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xtlnn" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354442 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/239e0484-3bc8-4628-ba87-d550ce2abb13-etcd-service-ca\") pod \"etcd-operator-b45778765-dpdfj\" (UID: \"239e0484-3bc8-4628-ba87-d550ce2abb13\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dpdfj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354464 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-etcd-serving-ca\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354485 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fm8tf\" (UniqueName: \"kubernetes.io/projected/faf7a532-753d-4862-812c-3ef174c75f81-kube-api-access-fm8tf\") pod \"marketplace-operator-79b997595-7h4hj\" (UID: \"faf7a532-753d-4862-812c-3ef174c75f81\") " pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354610 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-node-pullsecrets\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354637 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/faf7a532-753d-4862-812c-3ef174c75f81-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7h4hj\" (UID: \"faf7a532-753d-4862-812c-3ef174c75f81\") " pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354657 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/73bf70af-455b-4c32-8dda-324c3aa3d7b7-images\") pod \"machine-config-operator-74547568cd-52hrz\" (UID: \"73bf70af-455b-4c32-8dda-324c3aa3d7b7\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-52hrz" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354681 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8a856b4a-e13f-4691-8019-ff1939ba7726-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-wbqbm\" (UID: \"8a856b4a-e13f-4691-8019-ff1939ba7726\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wbqbm" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354709 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/90b0ba9d-f818-4059-a895-fe9947511987-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5w4zs\" (UID: \"90b0ba9d-f818-4059-a895-fe9947511987\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5w4zs" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354731 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbjjk\" (UniqueName: \"kubernetes.io/projected/188093c1-77b0-4e37-a53b-f3974ad22cff-kube-api-access-bbjjk\") pod \"collect-profiles-29484840-hsdps\" (UID: \"188093c1-77b0-4e37-a53b-f3974ad22cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-hsdps" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354755 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/331d69d8-6f0e-418f-baef-1162a103675e-available-featuregates\") pod \"openshift-config-operator-7777fb866f-r4qhd\" (UID: \"331d69d8-6f0e-418f-baef-1162a103675e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-r4qhd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354787 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/188093c1-77b0-4e37-a53b-f3974ad22cff-secret-volume\") pod \"collect-profiles-29484840-hsdps\" (UID: \"188093c1-77b0-4e37-a53b-f3974ad22cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-hsdps" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354816 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-config\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354833 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-serving-cert\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354851 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-encryption-config\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " 
pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354870 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/239e0484-3bc8-4628-ba87-d550ce2abb13-etcd-client\") pod \"etcd-operator-b45778765-dpdfj\" (UID: \"239e0484-3bc8-4628-ba87-d550ce2abb13\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dpdfj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354872 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1-trusted-ca\") pod \"console-operator-58897d9998-p4xvc\" (UID: \"2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1\") " pod="openshift-console-operator/console-operator-58897d9998-p4xvc" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354938 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4qst\" (UniqueName: \"kubernetes.io/projected/1ee65bdd-e09e-4bf1-8025-ec86426073d1-kube-api-access-f4qst\") pod \"machine-api-operator-5694c8668f-t47kf\" (UID: \"1ee65bdd-e09e-4bf1-8025-ec86426073d1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-t47kf" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.355194 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.354972 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/0bc6e378-496f-4a54-946a-99518f0d3570-machine-approver-tls\") pod \"machine-approver-56656f9798-6vm24\" (UID: \"0bc6e378-496f-4a54-946a-99518f0d3570\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6vm24" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.355340 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-gbbvd\" (UID: \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.355760 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/be128e90-65d6-43a9-9714-a031f24f23c4-proxy-tls\") pod \"machine-config-controller-84d6567774-l5nvn\" (UID: \"be128e90-65d6-43a9-9714-a031f24f23c4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5nvn" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.355854 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/be128e90-65d6-43a9-9714-a031f24f23c4-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-l5nvn\" (UID: \"be128e90-65d6-43a9-9714-a031f24f23c4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5nvn" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.355915 5017 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca0f873e-31ca-46ef-b2cf-c0e0a1ce8a3d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-h7glj\" (UID: \"ca0f873e-31ca-46ef-b2cf-c0e0a1ce8a3d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7glj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.355949 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/910fc4c2-7723-4380-89d8-9988bb9e53c8-service-ca-bundle\") pod \"authentication-operator-69f744f599-l57n2\" (UID: \"910fc4c2-7723-4380-89d8-9988bb9e53c8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l57n2" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.355976 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff503dda-7e52-4cb1-920f-bea378ae7a10-config\") pod \"kube-controller-manager-operator-78b949d7b-g77fg\" (UID: \"ff503dda-7e52-4cb1-920f-bea378ae7a10\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-g77fg" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.355999 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57kxf\" (UniqueName: \"kubernetes.io/projected/73bf70af-455b-4c32-8dda-324c3aa3d7b7-kube-api-access-57kxf\") pod \"machine-config-operator-74547568cd-52hrz\" (UID: \"73bf70af-455b-4c32-8dda-324c3aa3d7b7\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-52hrz" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.356028 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/188093c1-77b0-4e37-a53b-f3974ad22cff-config-volume\") pod \"collect-profiles-29484840-hsdps\" (UID: \"188093c1-77b0-4e37-a53b-f3974ad22cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-hsdps" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.356054 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-node-pullsecrets\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.356060 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/910fc4c2-7723-4380-89d8-9988bb9e53c8-serving-cert\") pod \"authentication-operator-69f744f599-l57n2\" (UID: \"910fc4c2-7723-4380-89d8-9988bb9e53c8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l57n2" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.356084 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-image-import-ca\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.356029 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-config\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.356176 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hn7dp\" (UniqueName: \"kubernetes.io/projected/0e0b63e6-f2e1-4ed1-8501-3d0420964499-kube-api-access-hn7dp\") pod \"downloads-7954f5f757-sk2hg\" (UID: \"0e0b63e6-f2e1-4ed1-8501-3d0420964499\") " pod="openshift-console/downloads-7954f5f757-sk2hg" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.356203 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/faf7a532-753d-4862-812c-3ef174c75f81-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7h4hj\" (UID: \"faf7a532-753d-4862-812c-3ef174c75f81\") " pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.356229 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjt6c\" (UniqueName: \"kubernetes.io/projected/be128e90-65d6-43a9-9714-a031f24f23c4-kube-api-access-xjt6c\") pod \"machine-config-controller-84d6567774-l5nvn\" (UID: \"be128e90-65d6-43a9-9714-a031f24f23c4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5nvn" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.356255 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ff503dda-7e52-4cb1-920f-bea378ae7a10-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-g77fg\" (UID: \"ff503dda-7e52-4cb1-920f-bea378ae7a10\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-g77fg" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.356276 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/73bf70af-455b-4c32-8dda-324c3aa3d7b7-auth-proxy-config\") pod \"machine-config-operator-74547568cd-52hrz\" (UID: \"73bf70af-455b-4c32-8dda-324c3aa3d7b7\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-52hrz" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.356308 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97bz8\" (UniqueName: \"kubernetes.io/projected/910fc4c2-7723-4380-89d8-9988bb9e53c8-kube-api-access-97bz8\") pod \"authentication-operator-69f744f599-l57n2\" (UID: \"910fc4c2-7723-4380-89d8-9988bb9e53c8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l57n2" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.356334 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ebc6dfd-7e32-4514-84be-0c290fc2c616-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-k82zx\" (UID: \"9ebc6dfd-7e32-4514-84be-0c290fc2c616\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k82zx" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.356360 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4b0269e-1d32-4db4-bc9a-185eb9ebcc33-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-xrrlz\" (UID: \"b4b0269e-1d32-4db4-bc9a-185eb9ebcc33\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xrrlz" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.356384 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xlgn\" (UniqueName: \"kubernetes.io/projected/96d2a4db-9f82-40e1-90fd-4f9ef3927f39-kube-api-access-4xlgn\") pod \"package-server-manager-789f6589d5-htpml\" (UID: \"96d2a4db-9f82-40e1-90fd-4f9ef3927f39\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-htpml" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.356424 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-serving-cert\") pod \"controller-manager-879f6c89f-gbbvd\" (UID: \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.356447 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1-serving-cert\") pod \"console-operator-58897d9998-p4xvc\" (UID: \"2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1\") " pod="openshift-console-operator/console-operator-58897d9998-p4xvc" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.356478 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/99574a1d-fcb0-4fac-a62c-1f58e9cc1a1f-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-hq6wt\" (UID: \"99574a1d-fcb0-4fac-a62c-1f58e9cc1a1f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hq6wt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.356499 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b4b0269e-1d32-4db4-bc9a-185eb9ebcc33-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-xrrlz\" (UID: \"b4b0269e-1d32-4db4-bc9a-185eb9ebcc33\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xrrlz" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.356519 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kc5nq\" (UniqueName: \"kubernetes.io/projected/239e0484-3bc8-4628-ba87-d550ce2abb13-kube-api-access-kc5nq\") pod \"etcd-operator-b45778765-dpdfj\" (UID: \"239e0484-3bc8-4628-ba87-d550ce2abb13\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dpdfj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.356824 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/910fc4c2-7723-4380-89d8-9988bb9e53c8-service-ca-bundle\") pod \"authentication-operator-69f744f599-l57n2\" (UID: \"910fc4c2-7723-4380-89d8-9988bb9e53c8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l57n2" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.357012 5017 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-console-config\") pod \"console-f9d7485db-j4qnq\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.357059 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/910fc4c2-7723-4380-89d8-9988bb9e53c8-config\") pod \"authentication-operator-69f744f599-l57n2\" (UID: \"910fc4c2-7723-4380-89d8-9988bb9e53c8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l57n2" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.357100 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-audit\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.357142 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-etcd-client\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.357419 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca0f873e-31ca-46ef-b2cf-c0e0a1ce8a3d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-h7glj\" (UID: \"ca0f873e-31ca-46ef-b2cf-c0e0a1ce8a3d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7glj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.357473 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-oauth-serving-cert\") pod \"console-f9d7485db-j4qnq\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.357802 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ee65bdd-e09e-4bf1-8025-ec86426073d1-config\") pod \"machine-api-operator-5694c8668f-t47kf\" (UID: \"1ee65bdd-e09e-4bf1-8025-ec86426073d1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-t47kf" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.357990 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/910fc4c2-7723-4380-89d8-9988bb9e53c8-config\") pod \"authentication-operator-69f744f599-l57n2\" (UID: \"910fc4c2-7723-4380-89d8-9988bb9e53c8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l57n2" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.358028 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-gbbvd\" (UID: \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.358085 5017 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-client-ca\") pod \"controller-manager-879f6c89f-gbbvd\" (UID: \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.358140 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/910fc4c2-7723-4380-89d8-9988bb9e53c8-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-l57n2\" (UID: \"910fc4c2-7723-4380-89d8-9988bb9e53c8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l57n2" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.358477 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-service-ca\") pod \"console-f9d7485db-j4qnq\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.358529 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-audit\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.358537 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/96d2a4db-9f82-40e1-90fd-4f9ef3927f39-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-htpml\" (UID: \"96d2a4db-9f82-40e1-90fd-4f9ef3927f39\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-htpml" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.358572 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzpcp\" (UniqueName: \"kubernetes.io/projected/331d69d8-6f0e-418f-baef-1162a103675e-kube-api-access-xzpcp\") pod \"openshift-config-operator-7777fb866f-r4qhd\" (UID: \"331d69d8-6f0e-418f-baef-1162a103675e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-r4qhd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.358986 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/910fc4c2-7723-4380-89d8-9988bb9e53c8-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-l57n2\" (UID: \"910fc4c2-7723-4380-89d8-9988bb9e53c8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l57n2" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.359266 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-config\") pod \"controller-manager-879f6c89f-gbbvd\" (UID: \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.359322 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/745626f0-e2c2-4916-b91b-41c204d3a825-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gkh55\" (UID: \"745626f0-e2c2-4916-b91b-41c204d3a825\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gkh55" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.359351 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7-config\") pod \"service-ca-operator-777779d784-v77t5\" (UID: \"d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-v77t5" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.359381 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ebc6dfd-7e32-4514-84be-0c290fc2c616-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-k82zx\" (UID: \"9ebc6dfd-7e32-4514-84be-0c290fc2c616\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k82zx" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.359393 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9ebc6dfd-7e32-4514-84be-0c290fc2c616-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-k82zx\" (UID: \"9ebc6dfd-7e32-4514-84be-0c290fc2c616\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k82zx" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.359471 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-audit-dir\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.359519 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1-config\") pod \"console-operator-58897d9998-p4xvc\" (UID: \"2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1\") " pod="openshift-console-operator/console-operator-58897d9998-p4xvc" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.359546 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1ee65bdd-e09e-4bf1-8025-ec86426073d1-images\") pod \"machine-api-operator-5694c8668f-t47kf\" (UID: \"1ee65bdd-e09e-4bf1-8025-ec86426073d1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-t47kf" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.359624 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca0f873e-31ca-46ef-b2cf-c0e0a1ce8a3d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-h7glj\" (UID: \"ca0f873e-31ca-46ef-b2cf-c0e0a1ce8a3d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7glj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.359656 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5c6qn\" (UniqueName: \"kubernetes.io/projected/ca0f873e-31ca-46ef-b2cf-c0e0a1ce8a3d-kube-api-access-5c6qn\") pod 
\"openshift-apiserver-operator-796bbdcf4f-h7glj\" (UID: \"ca0f873e-31ca-46ef-b2cf-c0e0a1ce8a3d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7glj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.359689 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1-serving-cert\") pod \"console-operator-58897d9998-p4xvc\" (UID: \"2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1\") " pod="openshift-console-operator/console-operator-58897d9998-p4xvc" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.359696 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7cvp\" (UniqueName: \"kubernetes.io/projected/b4b0269e-1d32-4db4-bc9a-185eb9ebcc33-kube-api-access-t7cvp\") pod \"kube-storage-version-migrator-operator-b67b599dd-xrrlz\" (UID: \"b4b0269e-1d32-4db4-bc9a-185eb9ebcc33\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xrrlz" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.359834 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkn4n\" (UniqueName: \"kubernetes.io/projected/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-kube-api-access-lkn4n\") pod \"controller-manager-879f6c89f-gbbvd\" (UID: \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.359865 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0bc6e378-496f-4a54-946a-99518f0d3570-auth-proxy-config\") pod \"machine-approver-56656f9798-6vm24\" (UID: \"0bc6e378-496f-4a54-946a-99518f0d3570\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6vm24" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.359887 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bc6e378-496f-4a54-946a-99518f0d3570-config\") pod \"machine-approver-56656f9798-6vm24\" (UID: \"0bc6e378-496f-4a54-946a-99518f0d3570\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6vm24" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.359950 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/239e0484-3bc8-4628-ba87-d550ce2abb13-config\") pod \"etcd-operator-b45778765-dpdfj\" (UID: \"239e0484-3bc8-4628-ba87-d550ce2abb13\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dpdfj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.359976 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-trusted-ca-bundle\") pod \"console-f9d7485db-j4qnq\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.359997 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/745626f0-e2c2-4916-b91b-41c204d3a825-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gkh55\" (UID: \"745626f0-e2c2-4916-b91b-41c204d3a825\") " 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gkh55" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.360223 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-service-ca\") pod \"console-f9d7485db-j4qnq\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.360515 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bc6e378-496f-4a54-946a-99518f0d3570-config\") pod \"machine-approver-56656f9798-6vm24\" (UID: \"0bc6e378-496f-4a54-946a-99518f0d3570\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6vm24" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.360583 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1ee65bdd-e09e-4bf1-8025-ec86426073d1-images\") pod \"machine-api-operator-5694c8668f-t47kf\" (UID: \"1ee65bdd-e09e-4bf1-8025-ec86426073d1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-t47kf" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.360745 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-encryption-config\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.360919 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1-config\") pod \"console-operator-58897d9998-p4xvc\" (UID: \"2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1\") " pod="openshift-console-operator/console-operator-58897d9998-p4xvc" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.361018 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-audit-dir\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.361083 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0bc6e378-496f-4a54-946a-99518f0d3570-auth-proxy-config\") pod \"machine-approver-56656f9798-6vm24\" (UID: \"0bc6e378-496f-4a54-946a-99518f0d3570\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6vm24" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.361142 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/e9e7d8d3-3f4f-4573-83e3-d2999cc53aa5-signing-cabundle\") pod \"service-ca-9c57cc56f-8ljnf\" (UID: \"e9e7d8d3-3f4f-4573-83e3-d2999cc53aa5\") " pod="openshift-service-ca/service-ca-9c57cc56f-8ljnf" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.361478 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-etcd-serving-ca\") pod \"apiserver-76f77b778f-qtbg9\" (UID: 
\"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.362075 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/910fc4c2-7723-4380-89d8-9988bb9e53c8-serving-cert\") pod \"authentication-operator-69f744f599-l57n2\" (UID: \"910fc4c2-7723-4380-89d8-9988bb9e53c8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l57n2" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.362164 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-config\") pod \"controller-manager-879f6c89f-gbbvd\" (UID: \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.362178 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9ebc6dfd-7e32-4514-84be-0c290fc2c616-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-k82zx\" (UID: \"9ebc6dfd-7e32-4514-84be-0c290fc2c616\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k82zx" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.362553 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-serving-cert\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.362580 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-etcd-client\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.362994 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/66a013c9-01a4-4027-99ec-185312c81a41-console-serving-cert\") pod \"console-f9d7485db-j4qnq\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.363498 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca0f873e-31ca-46ef-b2cf-c0e0a1ce8a3d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-h7glj\" (UID: \"ca0f873e-31ca-46ef-b2cf-c0e0a1ce8a3d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7glj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.363597 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8a856b4a-e13f-4691-8019-ff1939ba7726-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-wbqbm\" (UID: \"8a856b4a-e13f-4691-8019-ff1939ba7726\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wbqbm" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.363574 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-trusted-ca-bundle\") pod \"console-f9d7485db-j4qnq\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.364064 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/1ee65bdd-e09e-4bf1-8025-ec86426073d1-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-t47kf\" (UID: \"1ee65bdd-e09e-4bf1-8025-ec86426073d1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-t47kf" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.364121 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.364640 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/0bc6e378-496f-4a54-946a-99518f0d3570-machine-approver-tls\") pod \"machine-approver-56656f9798-6vm24\" (UID: \"0bc6e378-496f-4a54-946a-99518f0d3570\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6vm24" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.365012 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/8a856b4a-e13f-4691-8019-ff1939ba7726-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-wbqbm\" (UID: \"8a856b4a-e13f-4691-8019-ff1939ba7726\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wbqbm" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.366789 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/66a013c9-01a4-4027-99ec-185312c81a41-console-oauth-config\") pod \"console-f9d7485db-j4qnq\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.366814 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-serving-cert\") pod \"controller-manager-879f6c89f-gbbvd\" (UID: \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.385044 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.404187 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.424235 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.444157 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.461842 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/96d2a4db-9f82-40e1-90fd-4f9ef3927f39-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-htpml\" (UID: 
\"96d2a4db-9f82-40e1-90fd-4f9ef3927f39\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-htpml" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.461899 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzpcp\" (UniqueName: \"kubernetes.io/projected/331d69d8-6f0e-418f-baef-1162a103675e-kube-api-access-xzpcp\") pod \"openshift-config-operator-7777fb866f-r4qhd\" (UID: \"331d69d8-6f0e-418f-baef-1162a103675e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-r4qhd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462148 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/745626f0-e2c2-4916-b91b-41c204d3a825-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gkh55\" (UID: \"745626f0-e2c2-4916-b91b-41c204d3a825\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gkh55" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462190 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7-config\") pod \"service-ca-operator-777779d784-v77t5\" (UID: \"d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-v77t5" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462232 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7cvp\" (UniqueName: \"kubernetes.io/projected/b4b0269e-1d32-4db4-bc9a-185eb9ebcc33-kube-api-access-t7cvp\") pod \"kube-storage-version-migrator-operator-b67b599dd-xrrlz\" (UID: \"b4b0269e-1d32-4db4-bc9a-185eb9ebcc33\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xrrlz" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462295 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/239e0484-3bc8-4628-ba87-d550ce2abb13-config\") pod \"etcd-operator-b45778765-dpdfj\" (UID: \"239e0484-3bc8-4628-ba87-d550ce2abb13\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dpdfj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462318 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/745626f0-e2c2-4916-b91b-41c204d3a825-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gkh55\" (UID: \"745626f0-e2c2-4916-b91b-41c204d3a825\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gkh55" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462410 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/e9e7d8d3-3f4f-4573-83e3-d2999cc53aa5-signing-cabundle\") pod \"service-ca-9c57cc56f-8ljnf\" (UID: \"e9e7d8d3-3f4f-4573-83e3-d2999cc53aa5\") " pod="openshift-service-ca/service-ca-9c57cc56f-8ljnf" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462461 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqzng\" (UniqueName: \"kubernetes.io/projected/b792244f-2eec-407d-a684-adad1b2ec199-kube-api-access-qqzng\") pod \"migrator-59844c95c7-2lp2r\" (UID: \"b792244f-2eec-407d-a684-adad1b2ec199\") " 
pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2lp2r" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462490 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/e9e7d8d3-3f4f-4573-83e3-d2999cc53aa5-signing-key\") pod \"service-ca-9c57cc56f-8ljnf\" (UID: \"e9e7d8d3-3f4f-4573-83e3-d2999cc53aa5\") " pod="openshift-service-ca/service-ca-9c57cc56f-8ljnf" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462511 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rk84t\" (UniqueName: \"kubernetes.io/projected/e9e7d8d3-3f4f-4573-83e3-d2999cc53aa5-kube-api-access-rk84t\") pod \"service-ca-9c57cc56f-8ljnf\" (UID: \"e9e7d8d3-3f4f-4573-83e3-d2999cc53aa5\") " pod="openshift-service-ca/service-ca-9c57cc56f-8ljnf" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462538 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/239e0484-3bc8-4628-ba87-d550ce2abb13-etcd-ca\") pod \"etcd-operator-b45778765-dpdfj\" (UID: \"239e0484-3bc8-4628-ba87-d550ce2abb13\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dpdfj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462581 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5drgq\" (UniqueName: \"kubernetes.io/projected/90b0ba9d-f818-4059-a895-fe9947511987-kube-api-access-5drgq\") pod \"control-plane-machine-set-operator-78cbb6b69f-5w4zs\" (UID: \"90b0ba9d-f818-4059-a895-fe9947511987\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5w4zs" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462605 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7-serving-cert\") pod \"service-ca-operator-777779d784-v77t5\" (UID: \"d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-v77t5" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462628 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/73bf70af-455b-4c32-8dda-324c3aa3d7b7-proxy-tls\") pod \"machine-config-operator-74547568cd-52hrz\" (UID: \"73bf70af-455b-4c32-8dda-324c3aa3d7b7\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-52hrz" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462734 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lf6z8\" (UniqueName: \"kubernetes.io/projected/d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7-kube-api-access-lf6z8\") pod \"service-ca-operator-777779d784-v77t5\" (UID: \"d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-v77t5" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462762 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/44587741-560f-4a80-8497-4034c8f5c80a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-xtlnn\" (UID: \"44587741-560f-4a80-8497-4034c8f5c80a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xtlnn" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462787 5017 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/239e0484-3bc8-4628-ba87-d550ce2abb13-serving-cert\") pod \"etcd-operator-b45778765-dpdfj\" (UID: \"239e0484-3bc8-4628-ba87-d550ce2abb13\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dpdfj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462817 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/331d69d8-6f0e-418f-baef-1162a103675e-serving-cert\") pod \"openshift-config-operator-7777fb866f-r4qhd\" (UID: \"331d69d8-6f0e-418f-baef-1162a103675e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-r4qhd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462849 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff503dda-7e52-4cb1-920f-bea378ae7a10-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-g77fg\" (UID: \"ff503dda-7e52-4cb1-920f-bea378ae7a10\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-g77fg" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462872 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/745626f0-e2c2-4916-b91b-41c204d3a825-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gkh55\" (UID: \"745626f0-e2c2-4916-b91b-41c204d3a825\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gkh55" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462899 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/44587741-560f-4a80-8497-4034c8f5c80a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-xtlnn\" (UID: \"44587741-560f-4a80-8497-4034c8f5c80a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xtlnn" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462923 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9n22\" (UniqueName: \"kubernetes.io/projected/99574a1d-fcb0-4fac-a62c-1f58e9cc1a1f-kube-api-access-w9n22\") pod \"multus-admission-controller-857f4d67dd-hq6wt\" (UID: \"99574a1d-fcb0-4fac-a62c-1f58e9cc1a1f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hq6wt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462948 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44587741-560f-4a80-8497-4034c8f5c80a-config\") pod \"kube-apiserver-operator-766d6c64bb-xtlnn\" (UID: \"44587741-560f-4a80-8497-4034c8f5c80a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xtlnn" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462971 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/239e0484-3bc8-4628-ba87-d550ce2abb13-etcd-service-ca\") pod \"etcd-operator-b45778765-dpdfj\" (UID: \"239e0484-3bc8-4628-ba87-d550ce2abb13\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dpdfj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.462994 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fm8tf\" (UniqueName: 
\"kubernetes.io/projected/faf7a532-753d-4862-812c-3ef174c75f81-kube-api-access-fm8tf\") pod \"marketplace-operator-79b997595-7h4hj\" (UID: \"faf7a532-753d-4862-812c-3ef174c75f81\") " pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.463019 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/73bf70af-455b-4c32-8dda-324c3aa3d7b7-images\") pod \"machine-config-operator-74547568cd-52hrz\" (UID: \"73bf70af-455b-4c32-8dda-324c3aa3d7b7\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-52hrz" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.463053 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/faf7a532-753d-4862-812c-3ef174c75f81-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7h4hj\" (UID: \"faf7a532-753d-4862-812c-3ef174c75f81\") " pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.463086 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/90b0ba9d-f818-4059-a895-fe9947511987-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5w4zs\" (UID: \"90b0ba9d-f818-4059-a895-fe9947511987\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5w4zs" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.463131 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbjjk\" (UniqueName: \"kubernetes.io/projected/188093c1-77b0-4e37-a53b-f3974ad22cff-kube-api-access-bbjjk\") pod \"collect-profiles-29484840-hsdps\" (UID: \"188093c1-77b0-4e37-a53b-f3974ad22cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-hsdps" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.463158 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/331d69d8-6f0e-418f-baef-1162a103675e-available-featuregates\") pod \"openshift-config-operator-7777fb866f-r4qhd\" (UID: \"331d69d8-6f0e-418f-baef-1162a103675e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-r4qhd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.463182 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/188093c1-77b0-4e37-a53b-f3974ad22cff-secret-volume\") pod \"collect-profiles-29484840-hsdps\" (UID: \"188093c1-77b0-4e37-a53b-f3974ad22cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-hsdps" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.463208 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/239e0484-3bc8-4628-ba87-d550ce2abb13-etcd-client\") pod \"etcd-operator-b45778765-dpdfj\" (UID: \"239e0484-3bc8-4628-ba87-d550ce2abb13\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dpdfj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.463253 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/be128e90-65d6-43a9-9714-a031f24f23c4-proxy-tls\") pod 
\"machine-config-controller-84d6567774-l5nvn\" (UID: \"be128e90-65d6-43a9-9714-a031f24f23c4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5nvn" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.463277 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/be128e90-65d6-43a9-9714-a031f24f23c4-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-l5nvn\" (UID: \"be128e90-65d6-43a9-9714-a031f24f23c4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5nvn" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.463301 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff503dda-7e52-4cb1-920f-bea378ae7a10-config\") pod \"kube-controller-manager-operator-78b949d7b-g77fg\" (UID: \"ff503dda-7e52-4cb1-920f-bea378ae7a10\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-g77fg" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.463326 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57kxf\" (UniqueName: \"kubernetes.io/projected/73bf70af-455b-4c32-8dda-324c3aa3d7b7-kube-api-access-57kxf\") pod \"machine-config-operator-74547568cd-52hrz\" (UID: \"73bf70af-455b-4c32-8dda-324c3aa3d7b7\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-52hrz" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.463352 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/188093c1-77b0-4e37-a53b-f3974ad22cff-config-volume\") pod \"collect-profiles-29484840-hsdps\" (UID: \"188093c1-77b0-4e37-a53b-f3974ad22cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-hsdps" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.463373 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjt6c\" (UniqueName: \"kubernetes.io/projected/be128e90-65d6-43a9-9714-a031f24f23c4-kube-api-access-xjt6c\") pod \"machine-config-controller-84d6567774-l5nvn\" (UID: \"be128e90-65d6-43a9-9714-a031f24f23c4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5nvn" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.463394 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ff503dda-7e52-4cb1-920f-bea378ae7a10-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-g77fg\" (UID: \"ff503dda-7e52-4cb1-920f-bea378ae7a10\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-g77fg" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.463416 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/73bf70af-455b-4c32-8dda-324c3aa3d7b7-auth-proxy-config\") pod \"machine-config-operator-74547568cd-52hrz\" (UID: \"73bf70af-455b-4c32-8dda-324c3aa3d7b7\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-52hrz" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.463443 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/faf7a532-753d-4862-812c-3ef174c75f81-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7h4hj\" (UID: \"faf7a532-753d-4862-812c-3ef174c75f81\") " pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.463470 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4b0269e-1d32-4db4-bc9a-185eb9ebcc33-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-xrrlz\" (UID: \"b4b0269e-1d32-4db4-bc9a-185eb9ebcc33\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xrrlz" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.463493 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xlgn\" (UniqueName: \"kubernetes.io/projected/96d2a4db-9f82-40e1-90fd-4f9ef3927f39-kube-api-access-4xlgn\") pod \"package-server-manager-789f6589d5-htpml\" (UID: \"96d2a4db-9f82-40e1-90fd-4f9ef3927f39\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-htpml" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.463515 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b4b0269e-1d32-4db4-bc9a-185eb9ebcc33-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-xrrlz\" (UID: \"b4b0269e-1d32-4db4-bc9a-185eb9ebcc33\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xrrlz" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.463533 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kc5nq\" (UniqueName: \"kubernetes.io/projected/239e0484-3bc8-4628-ba87-d550ce2abb13-kube-api-access-kc5nq\") pod \"etcd-operator-b45778765-dpdfj\" (UID: \"239e0484-3bc8-4628-ba87-d550ce2abb13\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dpdfj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.463553 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/99574a1d-fcb0-4fac-a62c-1f58e9cc1a1f-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-hq6wt\" (UID: \"99574a1d-fcb0-4fac-a62c-1f58e9cc1a1f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hq6wt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.463854 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44587741-560f-4a80-8497-4034c8f5c80a-config\") pod \"kube-apiserver-operator-766d6c64bb-xtlnn\" (UID: \"44587741-560f-4a80-8497-4034c8f5c80a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xtlnn" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.464091 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/331d69d8-6f0e-418f-baef-1162a103675e-available-featuregates\") pod \"openshift-config-operator-7777fb866f-r4qhd\" (UID: \"331d69d8-6f0e-418f-baef-1162a103675e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-r4qhd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.464405 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 22 14:05:21 crc kubenswrapper[5017]: 
I0122 14:05:21.464672 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/73bf70af-455b-4c32-8dda-324c3aa3d7b7-auth-proxy-config\") pod \"machine-config-operator-74547568cd-52hrz\" (UID: \"73bf70af-455b-4c32-8dda-324c3aa3d7b7\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-52hrz" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.464775 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/be128e90-65d6-43a9-9714-a031f24f23c4-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-l5nvn\" (UID: \"be128e90-65d6-43a9-9714-a031f24f23c4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5nvn" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.465869 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/44587741-560f-4a80-8497-4034c8f5c80a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-xtlnn\" (UID: \"44587741-560f-4a80-8497-4034c8f5c80a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xtlnn" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.466463 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/331d69d8-6f0e-418f-baef-1162a103675e-serving-cert\") pod \"openshift-config-operator-7777fb866f-r4qhd\" (UID: \"331d69d8-6f0e-418f-baef-1162a103675e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-r4qhd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.485683 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.509193 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.523955 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.544249 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.555804 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff503dda-7e52-4cb1-920f-bea378ae7a10-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-g77fg\" (UID: \"ff503dda-7e52-4cb1-920f-bea378ae7a10\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-g77fg" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.565740 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.584020 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.594996 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/ff503dda-7e52-4cb1-920f-bea378ae7a10-config\") pod \"kube-controller-manager-operator-78b949d7b-g77fg\" (UID: \"ff503dda-7e52-4cb1-920f-bea378ae7a10\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-g77fg" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.605407 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.624532 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.644010 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.657074 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/239e0484-3bc8-4628-ba87-d550ce2abb13-etcd-client\") pod \"etcd-operator-b45778765-dpdfj\" (UID: \"239e0484-3bc8-4628-ba87-d550ce2abb13\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dpdfj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.664779 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.684793 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.703358 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.723983 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.735399 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/239e0484-3bc8-4628-ba87-d550ce2abb13-serving-cert\") pod \"etcd-operator-b45778765-dpdfj\" (UID: \"239e0484-3bc8-4628-ba87-d550ce2abb13\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dpdfj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.744222 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.754671 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/239e0484-3bc8-4628-ba87-d550ce2abb13-etcd-service-ca\") pod \"etcd-operator-b45778765-dpdfj\" (UID: \"239e0484-3bc8-4628-ba87-d550ce2abb13\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dpdfj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.763883 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.773868 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/239e0484-3bc8-4628-ba87-d550ce2abb13-config\") pod \"etcd-operator-b45778765-dpdfj\" (UID: \"239e0484-3bc8-4628-ba87-d550ce2abb13\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-dpdfj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.783754 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.794050 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/239e0484-3bc8-4628-ba87-d550ce2abb13-etcd-ca\") pod \"etcd-operator-b45778765-dpdfj\" (UID: \"239e0484-3bc8-4628-ba87-d550ce2abb13\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dpdfj" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.805179 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.825157 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.864627 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.884338 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.893847 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/73bf70af-455b-4c32-8dda-324c3aa3d7b7-images\") pod \"machine-config-operator-74547568cd-52hrz\" (UID: \"73bf70af-455b-4c32-8dda-324c3aa3d7b7\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-52hrz" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.903801 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.917883 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/73bf70af-455b-4c32-8dda-324c3aa3d7b7-proxy-tls\") pod \"machine-config-operator-74547568cd-52hrz\" (UID: \"73bf70af-455b-4c32-8dda-324c3aa3d7b7\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-52hrz" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.931054 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.945291 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.964930 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 22 14:05:21 crc kubenswrapper[5017]: I0122 14:05:21.984190 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.004289 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.023879 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 
14:05:22.044479 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.057542 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/188093c1-77b0-4e37-a53b-f3974ad22cff-secret-volume\") pod \"collect-profiles-29484840-hsdps\" (UID: \"188093c1-77b0-4e37-a53b-f3974ad22cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-hsdps" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.064629 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.084236 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.104581 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.124654 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.144781 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.164411 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.177864 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/faf7a532-753d-4862-812c-3ef174c75f81-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7h4hj\" (UID: \"faf7a532-753d-4862-812c-3ef174c75f81\") " pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.191768 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.195477 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/faf7a532-753d-4862-812c-3ef174c75f81-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7h4hj\" (UID: \"faf7a532-753d-4862-812c-3ef174c75f81\") " pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.204444 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.222301 5017 request.go:700] Waited for 1.00782668s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-api/secrets?fieldSelector=metadata.name%3Dcontrol-plane-machine-set-operator-tls&limit=500&resourceVersion=0 Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.223772 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.237635 
5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/90b0ba9d-f818-4059-a895-fe9947511987-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5w4zs\" (UID: \"90b0ba9d-f818-4059-a895-fe9947511987\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5w4zs" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.244350 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.264064 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.283173 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.304073 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.305050 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/e9e7d8d3-3f4f-4573-83e3-d2999cc53aa5-signing-cabundle\") pod \"service-ca-9c57cc56f-8ljnf\" (UID: \"e9e7d8d3-3f4f-4573-83e3-d2999cc53aa5\") " pod="openshift-service-ca/service-ca-9c57cc56f-8ljnf" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.323802 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.335638 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/e9e7d8d3-3f4f-4573-83e3-d2999cc53aa5-signing-key\") pod \"service-ca-9c57cc56f-8ljnf\" (UID: \"e9e7d8d3-3f4f-4573-83e3-d2999cc53aa5\") " pod="openshift-service-ca/service-ca-9c57cc56f-8ljnf" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.344059 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.363389 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.385978 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.397664 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/99574a1d-fcb0-4fac-a62c-1f58e9cc1a1f-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-hq6wt\" (UID: \"99574a1d-fcb0-4fac-a62c-1f58e9cc1a1f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hq6wt" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.404179 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.423852 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.444079 5017 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.455652 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4b0269e-1d32-4db4-bc9a-185eb9ebcc33-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-xrrlz\" (UID: \"b4b0269e-1d32-4db4-bc9a-185eb9ebcc33\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xrrlz" Jan 22 14:05:22 crc kubenswrapper[5017]: E0122 14:05:22.462253 5017 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/package-server-manager-serving-cert: failed to sync secret cache: timed out waiting for the condition Jan 22 14:05:22 crc kubenswrapper[5017]: E0122 14:05:22.462327 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/96d2a4db-9f82-40e1-90fd-4f9ef3927f39-package-server-manager-serving-cert podName:96d2a4db-9f82-40e1-90fd-4f9ef3927f39 nodeName:}" failed. No retries permitted until 2026-01-22 14:05:22.962304866 +0000 UTC m=+137.954766734 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "package-server-manager-serving-cert" (UniqueName: "kubernetes.io/secret/96d2a4db-9f82-40e1-90fd-4f9ef3927f39-package-server-manager-serving-cert") pod "package-server-manager-789f6589d5-htpml" (UID: "96d2a4db-9f82-40e1-90fd-4f9ef3927f39") : failed to sync secret cache: timed out waiting for the condition Jan 22 14:05:22 crc kubenswrapper[5017]: E0122 14:05:22.462399 5017 configmap.go:193] Couldn't get configMap openshift-service-ca-operator/service-ca-operator-config: failed to sync configmap cache: timed out waiting for the condition Jan 22 14:05:22 crc kubenswrapper[5017]: E0122 14:05:22.462438 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7-config podName:d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7 nodeName:}" failed. No retries permitted until 2026-01-22 14:05:22.962427819 +0000 UTC m=+137.954889677 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7-config") pod "service-ca-operator-777779d784-v77t5" (UID: "d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7") : failed to sync configmap cache: timed out waiting for the condition Jan 22 14:05:22 crc kubenswrapper[5017]: E0122 14:05:22.462467 5017 configmap.go:193] Couldn't get configMap openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-config: failed to sync configmap cache: timed out waiting for the condition Jan 22 14:05:22 crc kubenswrapper[5017]: E0122 14:05:22.462486 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/745626f0-e2c2-4916-b91b-41c204d3a825-config podName:745626f0-e2c2-4916-b91b-41c204d3a825 nodeName:}" failed. No retries permitted until 2026-01-22 14:05:22.962480721 +0000 UTC m=+137.954942579 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/745626f0-e2c2-4916-b91b-41c204d3a825-config") pod "openshift-kube-scheduler-operator-5fdd9b5758-gkh55" (UID: "745626f0-e2c2-4916-b91b-41c204d3a825") : failed to sync configmap cache: timed out waiting for the condition Jan 22 14:05:22 crc kubenswrapper[5017]: E0122 14:05:22.463607 5017 secret.go:188] Couldn't get secret openshift-kube-scheduler-operator/kube-scheduler-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Jan 22 14:05:22 crc kubenswrapper[5017]: E0122 14:05:22.463695 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/745626f0-e2c2-4916-b91b-41c204d3a825-serving-cert podName:745626f0-e2c2-4916-b91b-41c204d3a825 nodeName:}" failed. No retries permitted until 2026-01-22 14:05:22.963687515 +0000 UTC m=+137.956149373 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/745626f0-e2c2-4916-b91b-41c204d3a825-serving-cert") pod "openshift-kube-scheduler-operator-5fdd9b5758-gkh55" (UID: "745626f0-e2c2-4916-b91b-41c204d3a825") : failed to sync secret cache: timed out waiting for the condition Jan 22 14:05:22 crc kubenswrapper[5017]: E0122 14:05:22.463660 5017 secret.go:188] Couldn't get secret openshift-service-ca-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition Jan 22 14:05:22 crc kubenswrapper[5017]: E0122 14:05:22.463750 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7-serving-cert podName:d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7 nodeName:}" failed. No retries permitted until 2026-01-22 14:05:22.963744197 +0000 UTC m=+137.956206045 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7-serving-cert") pod "service-ca-operator-777779d784-v77t5" (UID: "d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7") : failed to sync secret cache: timed out waiting for the condition Jan 22 14:05:22 crc kubenswrapper[5017]: E0122 14:05:22.463768 5017 secret.go:188] Couldn't get secret openshift-machine-config-operator/mcc-proxy-tls: failed to sync secret cache: timed out waiting for the condition Jan 22 14:05:22 crc kubenswrapper[5017]: E0122 14:05:22.463807 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/be128e90-65d6-43a9-9714-a031f24f23c4-proxy-tls podName:be128e90-65d6-43a9-9714-a031f24f23c4 nodeName:}" failed. No retries permitted until 2026-01-22 14:05:22.963782688 +0000 UTC m=+137.956244546 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "proxy-tls" (UniqueName: "kubernetes.io/secret/be128e90-65d6-43a9-9714-a031f24f23c4-proxy-tls") pod "machine-config-controller-84d6567774-l5nvn" (UID: "be128e90-65d6-43a9-9714-a031f24f23c4") : failed to sync secret cache: timed out waiting for the condition Jan 22 14:05:22 crc kubenswrapper[5017]: E0122 14:05:22.463946 5017 secret.go:188] Couldn't get secret openshift-kube-storage-version-migrator-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.464063 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 22 14:05:22 crc kubenswrapper[5017]: E0122 14:05:22.464372 5017 configmap.go:193] Couldn't get configMap openshift-operator-lifecycle-manager/collect-profiles-config: failed to sync configmap cache: timed out waiting for the condition Jan 22 14:05:22 crc kubenswrapper[5017]: E0122 14:05:22.464067 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b4b0269e-1d32-4db4-bc9a-185eb9ebcc33-serving-cert podName:b4b0269e-1d32-4db4-bc9a-185eb9ebcc33 nodeName:}" failed. No retries permitted until 2026-01-22 14:05:22.964048816 +0000 UTC m=+137.956510674 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/b4b0269e-1d32-4db4-bc9a-185eb9ebcc33-serving-cert") pod "kube-storage-version-migrator-operator-b67b599dd-xrrlz" (UID: "b4b0269e-1d32-4db4-bc9a-185eb9ebcc33") : failed to sync secret cache: timed out waiting for the condition Jan 22 14:05:22 crc kubenswrapper[5017]: E0122 14:05:22.464484 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/188093c1-77b0-4e37-a53b-f3974ad22cff-config-volume podName:188093c1-77b0-4e37-a53b-f3974ad22cff nodeName:}" failed. No retries permitted until 2026-01-22 14:05:22.964473138 +0000 UTC m=+137.956934996 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-volume" (UniqueName: "kubernetes.io/configmap/188093c1-77b0-4e37-a53b-f3974ad22cff-config-volume") pod "collect-profiles-29484840-hsdps" (UID: "188093c1-77b0-4e37-a53b-f3974ad22cff") : failed to sync configmap cache: timed out waiting for the condition Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.483430 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.504426 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.523881 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.544185 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.564535 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.584097 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.604661 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.624188 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.644608 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.664548 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.685038 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.705204 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.724984 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.744483 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.764277 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.783495 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 14:05:22 crc kubenswrapper[5017]: 
I0122 14:05:22.804478 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.823651 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.843909 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.864267 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.899500 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwddp\" (UniqueName: \"kubernetes.io/projected/382be394-5527-4c07-9600-f70cb1c1ac18-kube-api-access-wwddp\") pod \"apiserver-7bbb656c7d-b5f9x\" (UID: \"382be394-5527-4c07-9600-f70cb1c1ac18\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.920183 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j48bd\" (UniqueName: \"kubernetes.io/projected/1a853328-872d-4a4e-98d1-681be7eb3603-kube-api-access-j48bd\") pod \"route-controller-manager-6576b87f9c-rtv74\" (UID: \"1a853328-872d-4a4e-98d1-681be7eb3603\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.943036 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnrb9\" (UniqueName: \"kubernetes.io/projected/58666604-0909-4502-a493-94f59c37556b-kube-api-access-wnrb9\") pod \"cluster-samples-operator-665b6dd947-66rfh\" (UID: \"58666604-0909-4502-a493-94f59c37556b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-66rfh" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.963827 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.984041 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.989564 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/745626f0-e2c2-4916-b91b-41c204d3a825-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gkh55\" (UID: \"745626f0-e2c2-4916-b91b-41c204d3a825\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gkh55" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.989688 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/be128e90-65d6-43a9-9714-a031f24f23c4-proxy-tls\") pod \"machine-config-controller-84d6567774-l5nvn\" (UID: \"be128e90-65d6-43a9-9714-a031f24f23c4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5nvn" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.989774 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/188093c1-77b0-4e37-a53b-f3974ad22cff-config-volume\") pod \"collect-profiles-29484840-hsdps\" (UID: 
\"188093c1-77b0-4e37-a53b-f3974ad22cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-hsdps" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.990021 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b4b0269e-1d32-4db4-bc9a-185eb9ebcc33-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-xrrlz\" (UID: \"b4b0269e-1d32-4db4-bc9a-185eb9ebcc33\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xrrlz" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.990072 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/96d2a4db-9f82-40e1-90fd-4f9ef3927f39-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-htpml\" (UID: \"96d2a4db-9f82-40e1-90fd-4f9ef3927f39\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-htpml" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.990819 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/188093c1-77b0-4e37-a53b-f3974ad22cff-config-volume\") pod \"collect-profiles-29484840-hsdps\" (UID: \"188093c1-77b0-4e37-a53b-f3974ad22cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-hsdps" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.990987 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7-config\") pod \"service-ca-operator-777779d784-v77t5\" (UID: \"d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-v77t5" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.991075 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/745626f0-e2c2-4916-b91b-41c204d3a825-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gkh55\" (UID: \"745626f0-e2c2-4916-b91b-41c204d3a825\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gkh55" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.991221 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7-serving-cert\") pod \"service-ca-operator-777779d784-v77t5\" (UID: \"d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-v77t5" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.992046 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7-config\") pod \"service-ca-operator-777779d784-v77t5\" (UID: \"d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-v77t5" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.992518 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/745626f0-e2c2-4916-b91b-41c204d3a825-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gkh55\" (UID: \"745626f0-e2c2-4916-b91b-41c204d3a825\") " 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gkh55" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.993351 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/745626f0-e2c2-4916-b91b-41c204d3a825-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gkh55\" (UID: \"745626f0-e2c2-4916-b91b-41c204d3a825\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gkh55" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.994500 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b4b0269e-1d32-4db4-bc9a-185eb9ebcc33-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-xrrlz\" (UID: \"b4b0269e-1d32-4db4-bc9a-185eb9ebcc33\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xrrlz" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.994535 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/be128e90-65d6-43a9-9714-a031f24f23c4-proxy-tls\") pod \"machine-config-controller-84d6567774-l5nvn\" (UID: \"be128e90-65d6-43a9-9714-a031f24f23c4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5nvn" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.994669 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/96d2a4db-9f82-40e1-90fd-4f9ef3927f39-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-htpml\" (UID: \"96d2a4db-9f82-40e1-90fd-4f9ef3927f39\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-htpml" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.995845 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7-serving-cert\") pod \"service-ca-operator-777779d784-v77t5\" (UID: \"d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-v77t5" Jan 22 14:05:22 crc kubenswrapper[5017]: I0122 14:05:22.997769 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.007431 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.020610 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-66rfh" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.024742 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.044661 5017 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.063590 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.087548 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.104839 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.124840 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.144799 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.173684 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74"] Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.185249 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7lqv\" (UniqueName: \"kubernetes.io/projected/66a013c9-01a4-4027-99ec-185312c81a41-kube-api-access-s7lqv\") pod \"console-f9d7485db-j4qnq\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.185976 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.203540 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hcrb\" (UniqueName: \"kubernetes.io/projected/0bc6e378-496f-4a54-946a-99518f0d3570-kube-api-access-4hcrb\") pod \"machine-approver-56656f9798-6vm24\" (UID: \"0bc6e378-496f-4a54-946a-99518f0d3570\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6vm24" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.211504 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-66rfh"] Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.219215 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" event={"ID":"1a853328-872d-4a4e-98d1-681be7eb3603","Type":"ContainerStarted","Data":"892864dc58350d7357b086aeec54ced5478efff0f7e2dc46e5f48ad6c1855280"} Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.219757 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrw4v\" (UniqueName: \"kubernetes.io/projected/b4ffbc55-9c5f-46c3-9c11-e868ad69196b-kube-api-access-jrw4v\") pod \"apiserver-76f77b778f-qtbg9\" (UID: \"b4ffbc55-9c5f-46c3-9c11-e868ad69196b\") " pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.222460 5017 request.go:700] Waited for 1.868401269s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console-operator/serviceaccounts/console-operator/token Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.241135 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ps8pj\" (UniqueName: \"kubernetes.io/projected/2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1-kube-api-access-ps8pj\") pod \"console-operator-58897d9998-p4xvc\" (UID: \"2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1\") " pod="openshift-console-operator/console-operator-58897d9998-p4xvc" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.264715 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s49bd\" (UniqueName: \"kubernetes.io/projected/9ebc6dfd-7e32-4514-84be-0c290fc2c616-kube-api-access-s49bd\") pod \"openshift-controller-manager-operator-756b6f6bc6-k82zx\" (UID: \"9ebc6dfd-7e32-4514-84be-0c290fc2c616\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k82zx" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.289422 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8a856b4a-e13f-4691-8019-ff1939ba7726-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-wbqbm\" (UID: \"8a856b4a-e13f-4691-8019-ff1939ba7726\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wbqbm" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.300149 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.301105 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vd5j\" (UniqueName: \"kubernetes.io/projected/8a856b4a-e13f-4691-8019-ff1939ba7726-kube-api-access-9vd5j\") pod \"cluster-image-registry-operator-dc59b4c8b-wbqbm\" (UID: \"8a856b4a-e13f-4691-8019-ff1939ba7726\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wbqbm" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.319251 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4qst\" (UniqueName: \"kubernetes.io/projected/1ee65bdd-e09e-4bf1-8025-ec86426073d1-kube-api-access-f4qst\") pod \"machine-api-operator-5694c8668f-t47kf\" (UID: \"1ee65bdd-e09e-4bf1-8025-ec86426073d1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-t47kf" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.343412 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hn7dp\" (UniqueName: \"kubernetes.io/projected/0e0b63e6-f2e1-4ed1-8501-3d0420964499-kube-api-access-hn7dp\") pod \"downloads-7954f5f757-sk2hg\" (UID: \"0e0b63e6-f2e1-4ed1-8501-3d0420964499\") " pod="openshift-console/downloads-7954f5f757-sk2hg" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.353568 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-p4xvc" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.359060 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97bz8\" (UniqueName: \"kubernetes.io/projected/910fc4c2-7723-4380-89d8-9988bb9e53c8-kube-api-access-97bz8\") pod \"authentication-operator-69f744f599-l57n2\" (UID: \"910fc4c2-7723-4380-89d8-9988bb9e53c8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-l57n2" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.379659 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkn4n\" (UniqueName: \"kubernetes.io/projected/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-kube-api-access-lkn4n\") pod \"controller-manager-879f6c89f-gbbvd\" (UID: \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.388685 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k82zx" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.400372 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-sk2hg" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.400658 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5c6qn\" (UniqueName: \"kubernetes.io/projected/ca0f873e-31ca-46ef-b2cf-c0e0a1ce8a3d-kube-api-access-5c6qn\") pod \"openshift-apiserver-operator-796bbdcf4f-h7glj\" (UID: \"ca0f873e-31ca-46ef-b2cf-c0e0a1ce8a3d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7glj" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.408619 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.414846 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wbqbm" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.417282 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzpcp\" (UniqueName: \"kubernetes.io/projected/331d69d8-6f0e-418f-baef-1162a103675e-kube-api-access-xzpcp\") pod \"openshift-config-operator-7777fb866f-r4qhd\" (UID: \"331d69d8-6f0e-418f-baef-1162a103675e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-r4qhd" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.422640 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-r4qhd" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.448409 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/745626f0-e2c2-4916-b91b-41c204d3a825-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gkh55\" (UID: \"745626f0-e2c2-4916-b91b-41c204d3a825\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gkh55" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.451971 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.464318 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6vm24" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.468693 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7cvp\" (UniqueName: \"kubernetes.io/projected/b4b0269e-1d32-4db4-bc9a-185eb9ebcc33-kube-api-access-t7cvp\") pod \"kube-storage-version-migrator-operator-b67b599dd-xrrlz\" (UID: \"b4b0269e-1d32-4db4-bc9a-185eb9ebcc33\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xrrlz" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.477060 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-l57n2" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.484145 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqzng\" (UniqueName: \"kubernetes.io/projected/b792244f-2eec-407d-a684-adad1b2ec199-kube-api-access-qqzng\") pod \"migrator-59844c95c7-2lp2r\" (UID: \"b792244f-2eec-407d-a684-adad1b2ec199\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2lp2r" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.509905 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5drgq\" (UniqueName: \"kubernetes.io/projected/90b0ba9d-f818-4059-a895-fe9947511987-kube-api-access-5drgq\") pod \"control-plane-machine-set-operator-78cbb6b69f-5w4zs\" (UID: \"90b0ba9d-f818-4059-a895-fe9947511987\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5w4zs" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.520934 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qtbg9"] Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.522746 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2lp2r" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.523127 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-t47kf" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.536291 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lf6z8\" (UniqueName: \"kubernetes.io/projected/d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7-kube-api-access-lf6z8\") pod \"service-ca-operator-777779d784-v77t5\" (UID: \"d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-v77t5" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.544015 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rk84t\" (UniqueName: \"kubernetes.io/projected/e9e7d8d3-3f4f-4573-83e3-d2999cc53aa5-kube-api-access-rk84t\") pod \"service-ca-9c57cc56f-8ljnf\" (UID: \"e9e7d8d3-3f4f-4573-83e3-d2999cc53aa5\") " pod="openshift-service-ca/service-ca-9c57cc56f-8ljnf" Jan 22 14:05:23 crc kubenswrapper[5017]: W0122 14:05:23.547223 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb4ffbc55_9c5f_46c3_9c11_e868ad69196b.slice/crio-a8a0310eebf8b5724b2ec6f42c8b85c27cba9e8adf1d552db10fcb5dc450a397 WatchSource:0}: Error finding container a8a0310eebf8b5724b2ec6f42c8b85c27cba9e8adf1d552db10fcb5dc450a397: Status 404 returned error can't find the container with id a8a0310eebf8b5724b2ec6f42c8b85c27cba9e8adf1d552db10fcb5dc450a397 Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.565571 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/44587741-560f-4a80-8497-4034c8f5c80a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-xtlnn\" (UID: \"44587741-560f-4a80-8497-4034c8f5c80a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xtlnn" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.580994 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x"] Jan 22 
14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.582687 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9n22\" (UniqueName: \"kubernetes.io/projected/99574a1d-fcb0-4fac-a62c-1f58e9cc1a1f-kube-api-access-w9n22\") pod \"multus-admission-controller-857f4d67dd-hq6wt\" (UID: \"99574a1d-fcb0-4fac-a62c-1f58e9cc1a1f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hq6wt" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.591454 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5w4zs" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.611400 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fm8tf\" (UniqueName: \"kubernetes.io/projected/faf7a532-753d-4862-812c-3ef174c75f81-kube-api-access-fm8tf\") pod \"marketplace-operator-79b997595-7h4hj\" (UID: \"faf7a532-753d-4862-812c-3ef174c75f81\") " pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.611705 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k82zx"] Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.615452 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-8ljnf" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.617636 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-p4xvc"] Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.618942 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbjjk\" (UniqueName: \"kubernetes.io/projected/188093c1-77b0-4e37-a53b-f3974ad22cff-kube-api-access-bbjjk\") pod \"collect-profiles-29484840-hsdps\" (UID: \"188093c1-77b0-4e37-a53b-f3974ad22cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-hsdps" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.624779 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-hq6wt" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.632526 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xrrlz" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.632545 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7glj" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.640648 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57kxf\" (UniqueName: \"kubernetes.io/projected/73bf70af-455b-4c32-8dda-324c3aa3d7b7-kube-api-access-57kxf\") pod \"machine-config-operator-74547568cd-52hrz\" (UID: \"73bf70af-455b-4c32-8dda-324c3aa3d7b7\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-52hrz" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.643821 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-sk2hg"] Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.648360 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-v77t5" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.658843 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xlgn\" (UniqueName: \"kubernetes.io/projected/96d2a4db-9f82-40e1-90fd-4f9ef3927f39-kube-api-access-4xlgn\") pod \"package-server-manager-789f6589d5-htpml\" (UID: \"96d2a4db-9f82-40e1-90fd-4f9ef3927f39\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-htpml" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.660912 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-r4qhd"] Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.665416 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gkh55" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.675865 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kc5nq\" (UniqueName: \"kubernetes.io/projected/239e0484-3bc8-4628-ba87-d550ce2abb13-kube-api-access-kc5nq\") pod \"etcd-operator-b45778765-dpdfj\" (UID: \"239e0484-3bc8-4628-ba87-d550ce2abb13\") " pod="openshift-etcd-operator/etcd-operator-b45778765-dpdfj" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.677717 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-hsdps" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.698296 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ff503dda-7e52-4cb1-920f-bea378ae7a10-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-g77fg\" (UID: \"ff503dda-7e52-4cb1-920f-bea378ae7a10\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-g77fg" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.717548 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjt6c\" (UniqueName: \"kubernetes.io/projected/be128e90-65d6-43a9-9714-a031f24f23c4-kube-api-access-xjt6c\") pod \"machine-config-controller-84d6567774-l5nvn\" (UID: \"be128e90-65d6-43a9-9714-a031f24f23c4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5nvn" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.730694 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xtlnn" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.848172 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-g77fg" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.848223 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-dpdfj" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.848385 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-52hrz" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.849814 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ae146171-4a21-45b6-8484-66fbdb597a5e-audit-dir\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.849955 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.849984 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.850005 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.850037 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/74271c15-b41a-4067-996a-3d24215d6eb8-registry-certificates\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.850060 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/74271c15-b41a-4067-996a-3d24215d6eb8-trusted-ca\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.850091 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.850198 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-cliconfig\") pod 
\"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.850224 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.850276 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.850323 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.850373 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/74271c15-b41a-4067-996a-3d24215d6eb8-ca-trust-extracted\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.850432 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.850457 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/74271c15-b41a-4067-996a-3d24215d6eb8-bound-sa-token\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.850480 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-audit-policies\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: E0122 14:05:23.850597 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-01-22 14:05:24.350576471 +0000 UTC m=+139.343038409 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.850654 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wksj\" (UniqueName: \"kubernetes.io/projected/ae146171-4a21-45b6-8484-66fbdb597a5e-kube-api-access-2wksj\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.850693 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.850725 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pm72m\" (UniqueName: \"kubernetes.io/projected/74271c15-b41a-4067-996a-3d24215d6eb8-kube-api-access-pm72m\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.850753 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/74271c15-b41a-4067-996a-3d24215d6eb8-registry-tls\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.851194 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/74271c15-b41a-4067-996a-3d24215d6eb8-installation-pull-secrets\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.851285 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.852050 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: W0122 14:05:23.862460 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod382be394_5527_4c07_9600_f70cb1c1ac18.slice/crio-f5959931cecd3e28b74df4d56a492cc47f9997bb72a46315aeb9a2ab2253c6bf WatchSource:0}: Error finding container f5959931cecd3e28b74df4d56a492cc47f9997bb72a46315aeb9a2ab2253c6bf: Status 404 returned error can't find the container with id f5959931cecd3e28b74df4d56a492cc47f9997bb72a46315aeb9a2ab2253c6bf Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.879735 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" Jan 22 14:05:23 crc kubenswrapper[5017]: W0122 14:05:23.881426 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2096a6e4_2ef6_410b_b7e5_e26c11f4ebb1.slice/crio-ce0a4ad67de7ac3b63cf1df1cd98cba479d735380e652e2f6e68ca8739b98098 WatchSource:0}: Error finding container ce0a4ad67de7ac3b63cf1df1cd98cba479d735380e652e2f6e68ca8739b98098: Status 404 returned error can't find the container with id ce0a4ad67de7ac3b63cf1df1cd98cba479d735380e652e2f6e68ca8739b98098 Jan 22 14:05:23 crc kubenswrapper[5017]: W0122 14:05:23.884169 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod331d69d8_6f0e_418f_baef_1162a103675e.slice/crio-69422d063308c6f2516e17b0970606b3aa12a978ea76e34e6bef7210ddaa9ffb WatchSource:0}: Error finding container 69422d063308c6f2516e17b0970606b3aa12a978ea76e34e6bef7210ddaa9ffb: Status 404 returned error can't find the container with id 69422d063308c6f2516e17b0970606b3aa12a978ea76e34e6bef7210ddaa9ffb Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.941430 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5nvn" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953337 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953517 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/74271c15-b41a-4067-996a-3d24215d6eb8-bound-sa-token\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953536 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-audit-policies\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953555 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/25f8bf41-00ef-46f3-bd41-a6a9ff25ab20-tmpfs\") pod \"packageserver-d55dfcdfc-xhnxk\" (UID: \"25f8bf41-00ef-46f3-bd41-a6a9ff25ab20\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhnxk" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953579 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wksj\" (UniqueName: \"kubernetes.io/projected/ae146171-4a21-45b6-8484-66fbdb597a5e-kube-api-access-2wksj\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953613 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/abfe1f56-9e2c-477c-a6f0-58cbc0513563-srv-cert\") pod \"olm-operator-6b444d44fb-ctq6h\" (UID: \"abfe1f56-9e2c-477c-a6f0-58cbc0513563\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ctq6h" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953629 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953647 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7d76ed93-3c40-4912-bbd2-ec4af6cefcc1-profile-collector-cert\") pod \"catalog-operator-68c6474976-nlphk\" (UID: \"7d76ed93-3c40-4912-bbd2-ec4af6cefcc1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nlphk" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953674 
5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pm72m\" (UniqueName: \"kubernetes.io/projected/74271c15-b41a-4067-996a-3d24215d6eb8-kube-api-access-pm72m\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953699 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/abfe1f56-9e2c-477c-a6f0-58cbc0513563-profile-collector-cert\") pod \"olm-operator-6b444d44fb-ctq6h\" (UID: \"abfe1f56-9e2c-477c-a6f0-58cbc0513563\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ctq6h" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953716 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/74271c15-b41a-4067-996a-3d24215d6eb8-registry-tls\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953734 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/10105fdd-b631-4438-a0c7-308a45f9e11e-service-ca-bundle\") pod \"router-default-5444994796-tm429\" (UID: \"10105fdd-b631-4438-a0c7-308a45f9e11e\") " pod="openshift-ingress/router-default-5444994796-tm429" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953754 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9cf1f133-a18d-4beb-952c-24b3d6530c8f-config-volume\") pod \"dns-default-ssjmv\" (UID: \"9cf1f133-a18d-4beb-952c-24b3d6530c8f\") " pod="openshift-dns/dns-default-ssjmv" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953783 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/74271c15-b41a-4067-996a-3d24215d6eb8-installation-pull-secrets\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953799 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87792bd4-454a-40fb-96fc-bf89d0aac093-metrics-tls\") pod \"dns-operator-744455d44c-l5x2x\" (UID: \"87792bd4-454a-40fb-96fc-bf89d0aac093\") " pod="openshift-dns-operator/dns-operator-744455d44c-l5x2x" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953813 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxqdz\" (UniqueName: \"kubernetes.io/projected/abfe1f56-9e2c-477c-a6f0-58cbc0513563-kube-api-access-vxqdz\") pod \"olm-operator-6b444d44fb-ctq6h\" (UID: \"abfe1f56-9e2c-477c-a6f0-58cbc0513563\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ctq6h" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953829 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9k2r\" (UniqueName: 
\"kubernetes.io/projected/10105fdd-b631-4438-a0c7-308a45f9e11e-kube-api-access-h9k2r\") pod \"router-default-5444994796-tm429\" (UID: \"10105fdd-b631-4438-a0c7-308a45f9e11e\") " pod="openshift-ingress/router-default-5444994796-tm429" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953886 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953918 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953938 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qn9zk\" (UniqueName: \"kubernetes.io/projected/7d76ed93-3c40-4912-bbd2-ec4af6cefcc1-kube-api-access-qn9zk\") pod \"catalog-operator-68c6474976-nlphk\" (UID: \"7d76ed93-3c40-4912-bbd2-ec4af6cefcc1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nlphk" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953951 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7d76ed93-3c40-4912-bbd2-ec4af6cefcc1-srv-cert\") pod \"catalog-operator-68c6474976-nlphk\" (UID: \"7d76ed93-3c40-4912-bbd2-ec4af6cefcc1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nlphk" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953968 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhjqh\" (UniqueName: \"kubernetes.io/projected/87792bd4-454a-40fb-96fc-bf89d0aac093-kube-api-access-jhjqh\") pod \"dns-operator-744455d44c-l5x2x\" (UID: \"87792bd4-454a-40fb-96fc-bf89d0aac093\") " pod="openshift-dns-operator/dns-operator-744455d44c-l5x2x" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953981 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/25f8bf41-00ef-46f3-bd41-a6a9ff25ab20-webhook-cert\") pod \"packageserver-d55dfcdfc-xhnxk\" (UID: \"25f8bf41-00ef-46f3-bd41-a6a9ff25ab20\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhnxk" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.953996 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9cf1f133-a18d-4beb-952c-24b3d6530c8f-metrics-tls\") pod \"dns-default-ssjmv\" (UID: \"9cf1f133-a18d-4beb-952c-24b3d6530c8f\") " pod="openshift-dns/dns-default-ssjmv" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.954013 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.954028 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.954043 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ae146171-4a21-45b6-8484-66fbdb597a5e-audit-dir\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.954059 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.954073 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/10105fdd-b631-4438-a0c7-308a45f9e11e-default-certificate\") pod \"router-default-5444994796-tm429\" (UID: \"10105fdd-b631-4438-a0c7-308a45f9e11e\") " pod="openshift-ingress/router-default-5444994796-tm429" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.954089 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/82ca4b25-fb05-4542-814f-9a9cc19662d5-metrics-tls\") pod \"ingress-operator-5b745b69d9-6d76w\" (UID: \"82ca4b25-fb05-4542-814f-9a9cc19662d5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6d76w" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.954103 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csshc\" (UniqueName: \"kubernetes.io/projected/82ca4b25-fb05-4542-814f-9a9cc19662d5-kube-api-access-csshc\") pod \"ingress-operator-5b745b69d9-6d76w\" (UID: \"82ca4b25-fb05-4542-814f-9a9cc19662d5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6d76w" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.954134 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/74271c15-b41a-4067-996a-3d24215d6eb8-registry-certificates\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.954149 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/74271c15-b41a-4067-996a-3d24215d6eb8-trusted-ca\") pod 
\"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.954172 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/10105fdd-b631-4438-a0c7-308a45f9e11e-stats-auth\") pod \"router-default-5444994796-tm429\" (UID: \"10105fdd-b631-4438-a0c7-308a45f9e11e\") " pod="openshift-ingress/router-default-5444994796-tm429" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.954197 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.954213 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.954230 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/25f8bf41-00ef-46f3-bd41-a6a9ff25ab20-apiservice-cert\") pod \"packageserver-d55dfcdfc-xhnxk\" (UID: \"25f8bf41-00ef-46f3-bd41-a6a9ff25ab20\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhnxk" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.954246 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.954291 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.954308 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/10105fdd-b631-4438-a0c7-308a45f9e11e-metrics-certs\") pod \"router-default-5444994796-tm429\" (UID: \"10105fdd-b631-4438-a0c7-308a45f9e11e\") " pod="openshift-ingress/router-default-5444994796-tm429" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.954322 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/82ca4b25-fb05-4542-814f-9a9cc19662d5-bound-sa-token\") pod \"ingress-operator-5b745b69d9-6d76w\" (UID: \"82ca4b25-fb05-4542-814f-9a9cc19662d5\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6d76w" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.954339 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/82ca4b25-fb05-4542-814f-9a9cc19662d5-trusted-ca\") pod \"ingress-operator-5b745b69d9-6d76w\" (UID: \"82ca4b25-fb05-4542-814f-9a9cc19662d5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6d76w" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.954358 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/74271c15-b41a-4067-996a-3d24215d6eb8-ca-trust-extracted\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.954388 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lgg7\" (UniqueName: \"kubernetes.io/projected/25f8bf41-00ef-46f3-bd41-a6a9ff25ab20-kube-api-access-8lgg7\") pod \"packageserver-d55dfcdfc-xhnxk\" (UID: \"25f8bf41-00ef-46f3-bd41-a6a9ff25ab20\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhnxk" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.954418 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.954434 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rnzt\" (UniqueName: \"kubernetes.io/projected/9cf1f133-a18d-4beb-952c-24b3d6530c8f-kube-api-access-6rnzt\") pod \"dns-default-ssjmv\" (UID: \"9cf1f133-a18d-4beb-952c-24b3d6530c8f\") " pod="openshift-dns/dns-default-ssjmv" Jan 22 14:05:23 crc kubenswrapper[5017]: E0122 14:05:23.954547 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:24.454532671 +0000 UTC m=+139.446994529 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.957388 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-audit-policies\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.957442 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ae146171-4a21-45b6-8484-66fbdb597a5e-audit-dir\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.957839 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.958807 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/74271c15-b41a-4067-996a-3d24215d6eb8-registry-certificates\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.960270 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/74271c15-b41a-4067-996a-3d24215d6eb8-trusted-ca\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.960272 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.960944 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-htpml" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.962004 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/74271c15-b41a-4067-996a-3d24215d6eb8-ca-trust-extracted\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.962534 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.963551 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.965906 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.965952 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.967052 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.968050 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.968645 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.970614 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/74271c15-b41a-4067-996a-3d24215d6eb8-installation-pull-secrets\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.970626 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.971071 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.971147 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/74271c15-b41a-4067-996a-3d24215d6eb8-registry-tls\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:23 crc kubenswrapper[5017]: I0122 14:05:23.997360 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/74271c15-b41a-4067-996a-3d24215d6eb8-bound-sa-token\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.020298 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wksj\" (UniqueName: \"kubernetes.io/projected/ae146171-4a21-45b6-8484-66fbdb597a5e-kube-api-access-2wksj\") pod \"oauth-openshift-558db77b4-7pt94\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.041454 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pm72m\" (UniqueName: \"kubernetes.io/projected/74271c15-b41a-4067-996a-3d24215d6eb8-kube-api-access-pm72m\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.049762 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.055381 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qn9zk\" (UniqueName: \"kubernetes.io/projected/7d76ed93-3c40-4912-bbd2-ec4af6cefcc1-kube-api-access-qn9zk\") pod \"catalog-operator-68c6474976-nlphk\" (UID: \"7d76ed93-3c40-4912-bbd2-ec4af6cefcc1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nlphk" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.055419 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7d76ed93-3c40-4912-bbd2-ec4af6cefcc1-srv-cert\") pod \"catalog-operator-68c6474976-nlphk\" (UID: \"7d76ed93-3c40-4912-bbd2-ec4af6cefcc1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nlphk" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.055448 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhjqh\" (UniqueName: \"kubernetes.io/projected/87792bd4-454a-40fb-96fc-bf89d0aac093-kube-api-access-jhjqh\") pod \"dns-operator-744455d44c-l5x2x\" (UID: \"87792bd4-454a-40fb-96fc-bf89d0aac093\") " pod="openshift-dns-operator/dns-operator-744455d44c-l5x2x" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.055464 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/25f8bf41-00ef-46f3-bd41-a6a9ff25ab20-webhook-cert\") pod \"packageserver-d55dfcdfc-xhnxk\" (UID: \"25f8bf41-00ef-46f3-bd41-a6a9ff25ab20\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhnxk" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.055480 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9cf1f133-a18d-4beb-952c-24b3d6530c8f-metrics-tls\") pod \"dns-default-ssjmv\" (UID: \"9cf1f133-a18d-4beb-952c-24b3d6530c8f\") " pod="openshift-dns/dns-default-ssjmv" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.056498 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/10105fdd-b631-4438-a0c7-308a45f9e11e-default-certificate\") pod \"router-default-5444994796-tm429\" (UID: \"10105fdd-b631-4438-a0c7-308a45f9e11e\") " pod="openshift-ingress/router-default-5444994796-tm429" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.057176 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/82ca4b25-fb05-4542-814f-9a9cc19662d5-metrics-tls\") pod \"ingress-operator-5b745b69d9-6d76w\" (UID: \"82ca4b25-fb05-4542-814f-9a9cc19662d5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6d76w" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.057212 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csshc\" (UniqueName: \"kubernetes.io/projected/82ca4b25-fb05-4542-814f-9a9cc19662d5-kube-api-access-csshc\") pod \"ingress-operator-5b745b69d9-6d76w\" (UID: \"82ca4b25-fb05-4542-814f-9a9cc19662d5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6d76w" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.057243 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.057713 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/10105fdd-b631-4438-a0c7-308a45f9e11e-stats-auth\") pod \"router-default-5444994796-tm429\" (UID: \"10105fdd-b631-4438-a0c7-308a45f9e11e\") " pod="openshift-ingress/router-default-5444994796-tm429" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.057752 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/6fd07d3c-3d37-4d33-b14d-94a806908619-plugins-dir\") pod \"csi-hostpathplugin-4j8zp\" (UID: \"6fd07d3c-3d37-4d33-b14d-94a806908619\") " pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.057786 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/25f8bf41-00ef-46f3-bd41-a6a9ff25ab20-apiservice-cert\") pod \"packageserver-d55dfcdfc-xhnxk\" (UID: \"25f8bf41-00ef-46f3-bd41-a6a9ff25ab20\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhnxk" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.057871 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/10105fdd-b631-4438-a0c7-308a45f9e11e-metrics-certs\") pod \"router-default-5444994796-tm429\" (UID: \"10105fdd-b631-4438-a0c7-308a45f9e11e\") " pod="openshift-ingress/router-default-5444994796-tm429" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.057959 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/82ca4b25-fb05-4542-814f-9a9cc19662d5-bound-sa-token\") pod \"ingress-operator-5b745b69d9-6d76w\" (UID: \"82ca4b25-fb05-4542-814f-9a9cc19662d5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6d76w" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.058138 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/82ca4b25-fb05-4542-814f-9a9cc19662d5-trusted-ca\") pod \"ingress-operator-5b745b69d9-6d76w\" (UID: \"82ca4b25-fb05-4542-814f-9a9cc19662d5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6d76w" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.058164 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fe2340df-b827-4162-8910-7d38587dee24-cert\") pod \"ingress-canary-r84vw\" (UID: \"fe2340df-b827-4162-8910-7d38587dee24\") " pod="openshift-ingress-canary/ingress-canary-r84vw" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.058345 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5cef9e3e-e4a8-4f72-b8aa-15b8a5f97639-node-bootstrap-token\") pod \"machine-config-server-pdfrn\" (UID: \"5cef9e3e-e4a8-4f72-b8aa-15b8a5f97639\") " 
pod="openshift-machine-config-operator/machine-config-server-pdfrn" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.058466 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/6fd07d3c-3d37-4d33-b14d-94a806908619-csi-data-dir\") pod \"csi-hostpathplugin-4j8zp\" (UID: \"6fd07d3c-3d37-4d33-b14d-94a806908619\") " pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.058540 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lgg7\" (UniqueName: \"kubernetes.io/projected/25f8bf41-00ef-46f3-bd41-a6a9ff25ab20-kube-api-access-8lgg7\") pod \"packageserver-d55dfcdfc-xhnxk\" (UID: \"25f8bf41-00ef-46f3-bd41-a6a9ff25ab20\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhnxk" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.058580 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rnzt\" (UniqueName: \"kubernetes.io/projected/9cf1f133-a18d-4beb-952c-24b3d6530c8f-kube-api-access-6rnzt\") pod \"dns-default-ssjmv\" (UID: \"9cf1f133-a18d-4beb-952c-24b3d6530c8f\") " pod="openshift-dns/dns-default-ssjmv" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.058634 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/25f8bf41-00ef-46f3-bd41-a6a9ff25ab20-tmpfs\") pod \"packageserver-d55dfcdfc-xhnxk\" (UID: \"25f8bf41-00ef-46f3-bd41-a6a9ff25ab20\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhnxk" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.058658 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/6fd07d3c-3d37-4d33-b14d-94a806908619-registration-dir\") pod \"csi-hostpathplugin-4j8zp\" (UID: \"6fd07d3c-3d37-4d33-b14d-94a806908619\") " pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.058747 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/abfe1f56-9e2c-477c-a6f0-58cbc0513563-srv-cert\") pod \"olm-operator-6b444d44fb-ctq6h\" (UID: \"abfe1f56-9e2c-477c-a6f0-58cbc0513563\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ctq6h" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.058773 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7d76ed93-3c40-4912-bbd2-ec4af6cefcc1-profile-collector-cert\") pod \"catalog-operator-68c6474976-nlphk\" (UID: \"7d76ed93-3c40-4912-bbd2-ec4af6cefcc1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nlphk" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.059451 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drkqk\" (UniqueName: \"kubernetes.io/projected/6fd07d3c-3d37-4d33-b14d-94a806908619-kube-api-access-drkqk\") pod \"csi-hostpathplugin-4j8zp\" (UID: \"6fd07d3c-3d37-4d33-b14d-94a806908619\") " pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.059504 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" 
(UniqueName: \"kubernetes.io/secret/abfe1f56-9e2c-477c-a6f0-58cbc0513563-profile-collector-cert\") pod \"olm-operator-6b444d44fb-ctq6h\" (UID: \"abfe1f56-9e2c-477c-a6f0-58cbc0513563\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ctq6h" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.059529 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/10105fdd-b631-4438-a0c7-308a45f9e11e-service-ca-bundle\") pod \"router-default-5444994796-tm429\" (UID: \"10105fdd-b631-4438-a0c7-308a45f9e11e\") " pod="openshift-ingress/router-default-5444994796-tm429" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.059558 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9cf1f133-a18d-4beb-952c-24b3d6530c8f-config-volume\") pod \"dns-default-ssjmv\" (UID: \"9cf1f133-a18d-4beb-952c-24b3d6530c8f\") " pod="openshift-dns/dns-default-ssjmv" Jan 22 14:05:24 crc kubenswrapper[5017]: E0122 14:05:24.059710 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:24.559695566 +0000 UTC m=+139.552157514 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.060174 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/25f8bf41-00ef-46f3-bd41-a6a9ff25ab20-webhook-cert\") pod \"packageserver-d55dfcdfc-xhnxk\" (UID: \"25f8bf41-00ef-46f3-bd41-a6a9ff25ab20\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhnxk" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.060235 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7d76ed93-3c40-4912-bbd2-ec4af6cefcc1-srv-cert\") pod \"catalog-operator-68c6474976-nlphk\" (UID: \"7d76ed93-3c40-4912-bbd2-ec4af6cefcc1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nlphk" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.060662 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/25f8bf41-00ef-46f3-bd41-a6a9ff25ab20-tmpfs\") pod \"packageserver-d55dfcdfc-xhnxk\" (UID: \"25f8bf41-00ef-46f3-bd41-a6a9ff25ab20\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhnxk" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.062880 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/25f8bf41-00ef-46f3-bd41-a6a9ff25ab20-apiservice-cert\") pod \"packageserver-d55dfcdfc-xhnxk\" (UID: \"25f8bf41-00ef-46f3-bd41-a6a9ff25ab20\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhnxk" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.071418 5017 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/10105fdd-b631-4438-a0c7-308a45f9e11e-default-certificate\") pod \"router-default-5444994796-tm429\" (UID: \"10105fdd-b631-4438-a0c7-308a45f9e11e\") " pod="openshift-ingress/router-default-5444994796-tm429" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.072157 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7d76ed93-3c40-4912-bbd2-ec4af6cefcc1-profile-collector-cert\") pod \"catalog-operator-68c6474976-nlphk\" (UID: \"7d76ed93-3c40-4912-bbd2-ec4af6cefcc1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nlphk" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.072959 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/82ca4b25-fb05-4542-814f-9a9cc19662d5-trusted-ca\") pod \"ingress-operator-5b745b69d9-6d76w\" (UID: \"82ca4b25-fb05-4542-814f-9a9cc19662d5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6d76w" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.073480 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/10105fdd-b631-4438-a0c7-308a45f9e11e-service-ca-bundle\") pod \"router-default-5444994796-tm429\" (UID: \"10105fdd-b631-4438-a0c7-308a45f9e11e\") " pod="openshift-ingress/router-default-5444994796-tm429" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.061290 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4497p\" (UniqueName: \"kubernetes.io/projected/fe2340df-b827-4162-8910-7d38587dee24-kube-api-access-4497p\") pod \"ingress-canary-r84vw\" (UID: \"fe2340df-b827-4162-8910-7d38587dee24\") " pod="openshift-ingress-canary/ingress-canary-r84vw" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.073902 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87792bd4-454a-40fb-96fc-bf89d0aac093-metrics-tls\") pod \"dns-operator-744455d44c-l5x2x\" (UID: \"87792bd4-454a-40fb-96fc-bf89d0aac093\") " pod="openshift-dns-operator/dns-operator-744455d44c-l5x2x" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.073931 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxqdz\" (UniqueName: \"kubernetes.io/projected/abfe1f56-9e2c-477c-a6f0-58cbc0513563-kube-api-access-vxqdz\") pod \"olm-operator-6b444d44fb-ctq6h\" (UID: \"abfe1f56-9e2c-477c-a6f0-58cbc0513563\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ctq6h" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.073934 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9cf1f133-a18d-4beb-952c-24b3d6530c8f-config-volume\") pod \"dns-default-ssjmv\" (UID: \"9cf1f133-a18d-4beb-952c-24b3d6530c8f\") " pod="openshift-dns/dns-default-ssjmv" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.073984 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8v2x\" (UniqueName: \"kubernetes.io/projected/5cef9e3e-e4a8-4f72-b8aa-15b8a5f97639-kube-api-access-k8v2x\") pod \"machine-config-server-pdfrn\" (UID: \"5cef9e3e-e4a8-4f72-b8aa-15b8a5f97639\") " 
pod="openshift-machine-config-operator/machine-config-server-pdfrn" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.074376 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5cef9e3e-e4a8-4f72-b8aa-15b8a5f97639-certs\") pod \"machine-config-server-pdfrn\" (UID: \"5cef9e3e-e4a8-4f72-b8aa-15b8a5f97639\") " pod="openshift-machine-config-operator/machine-config-server-pdfrn" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.074567 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/82ca4b25-fb05-4542-814f-9a9cc19662d5-metrics-tls\") pod \"ingress-operator-5b745b69d9-6d76w\" (UID: \"82ca4b25-fb05-4542-814f-9a9cc19662d5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6d76w" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.074584 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9k2r\" (UniqueName: \"kubernetes.io/projected/10105fdd-b631-4438-a0c7-308a45f9e11e-kube-api-access-h9k2r\") pod \"router-default-5444994796-tm429\" (UID: \"10105fdd-b631-4438-a0c7-308a45f9e11e\") " pod="openshift-ingress/router-default-5444994796-tm429" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.074848 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/6fd07d3c-3d37-4d33-b14d-94a806908619-socket-dir\") pod \"csi-hostpathplugin-4j8zp\" (UID: \"6fd07d3c-3d37-4d33-b14d-94a806908619\") " pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.075132 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/6fd07d3c-3d37-4d33-b14d-94a806908619-mountpoint-dir\") pod \"csi-hostpathplugin-4j8zp\" (UID: \"6fd07d3c-3d37-4d33-b14d-94a806908619\") " pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.074892 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9cf1f133-a18d-4beb-952c-24b3d6530c8f-metrics-tls\") pod \"dns-default-ssjmv\" (UID: \"9cf1f133-a18d-4beb-952c-24b3d6530c8f\") " pod="openshift-dns/dns-default-ssjmv" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.075227 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/abfe1f56-9e2c-477c-a6f0-58cbc0513563-profile-collector-cert\") pod \"olm-operator-6b444d44fb-ctq6h\" (UID: \"abfe1f56-9e2c-477c-a6f0-58cbc0513563\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ctq6h" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.074897 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-2lp2r"] Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.075823 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/10105fdd-b631-4438-a0c7-308a45f9e11e-metrics-certs\") pod \"router-default-5444994796-tm429\" (UID: \"10105fdd-b631-4438-a0c7-308a45f9e11e\") " pod="openshift-ingress/router-default-5444994796-tm429" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.076306 5017 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/abfe1f56-9e2c-477c-a6f0-58cbc0513563-srv-cert\") pod \"olm-operator-6b444d44fb-ctq6h\" (UID: \"abfe1f56-9e2c-477c-a6f0-58cbc0513563\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ctq6h" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.079752 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87792bd4-454a-40fb-96fc-bf89d0aac093-metrics-tls\") pod \"dns-operator-744455d44c-l5x2x\" (UID: \"87792bd4-454a-40fb-96fc-bf89d0aac093\") " pod="openshift-dns-operator/dns-operator-744455d44c-l5x2x" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.080040 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/10105fdd-b631-4438-a0c7-308a45f9e11e-stats-auth\") pod \"router-default-5444994796-tm429\" (UID: \"10105fdd-b631-4438-a0c7-308a45f9e11e\") " pod="openshift-ingress/router-default-5444994796-tm429" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.100283 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qn9zk\" (UniqueName: \"kubernetes.io/projected/7d76ed93-3c40-4912-bbd2-ec4af6cefcc1-kube-api-access-qn9zk\") pod \"catalog-operator-68c6474976-nlphk\" (UID: \"7d76ed93-3c40-4912-bbd2-ec4af6cefcc1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nlphk" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.117588 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhjqh\" (UniqueName: \"kubernetes.io/projected/87792bd4-454a-40fb-96fc-bf89d0aac093-kube-api-access-jhjqh\") pod \"dns-operator-744455d44c-l5x2x\" (UID: \"87792bd4-454a-40fb-96fc-bf89d0aac093\") " pod="openshift-dns-operator/dns-operator-744455d44c-l5x2x" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.137362 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csshc\" (UniqueName: \"kubernetes.io/projected/82ca4b25-fb05-4542-814f-9a9cc19662d5-kube-api-access-csshc\") pod \"ingress-operator-5b745b69d9-6d76w\" (UID: \"82ca4b25-fb05-4542-814f-9a9cc19662d5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6d76w" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.157476 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nlphk" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.158661 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rnzt\" (UniqueName: \"kubernetes.io/projected/9cf1f133-a18d-4beb-952c-24b3d6530c8f-kube-api-access-6rnzt\") pod \"dns-default-ssjmv\" (UID: \"9cf1f133-a18d-4beb-952c-24b3d6530c8f\") " pod="openshift-dns/dns-default-ssjmv" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.175991 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:24 crc kubenswrapper[5017]: E0122 14:05:24.176081 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:24.676062611 +0000 UTC m=+139.668524479 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.176106 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/82ca4b25-fb05-4542-814f-9a9cc19662d5-bound-sa-token\") pod \"ingress-operator-5b745b69d9-6d76w\" (UID: \"82ca4b25-fb05-4542-814f-9a9cc19662d5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6d76w" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.176345 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4497p\" (UniqueName: \"kubernetes.io/projected/fe2340df-b827-4162-8910-7d38587dee24-kube-api-access-4497p\") pod \"ingress-canary-r84vw\" (UID: \"fe2340df-b827-4162-8910-7d38587dee24\") " pod="openshift-ingress-canary/ingress-canary-r84vw" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.176393 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8v2x\" (UniqueName: \"kubernetes.io/projected/5cef9e3e-e4a8-4f72-b8aa-15b8a5f97639-kube-api-access-k8v2x\") pod \"machine-config-server-pdfrn\" (UID: \"5cef9e3e-e4a8-4f72-b8aa-15b8a5f97639\") " pod="openshift-machine-config-operator/machine-config-server-pdfrn" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.176434 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5cef9e3e-e4a8-4f72-b8aa-15b8a5f97639-certs\") pod \"machine-config-server-pdfrn\" (UID: \"5cef9e3e-e4a8-4f72-b8aa-15b8a5f97639\") " pod="openshift-machine-config-operator/machine-config-server-pdfrn" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.176503 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: 
\"kubernetes.io/host-path/6fd07d3c-3d37-4d33-b14d-94a806908619-socket-dir\") pod \"csi-hostpathplugin-4j8zp\" (UID: \"6fd07d3c-3d37-4d33-b14d-94a806908619\") " pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.176846 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/6fd07d3c-3d37-4d33-b14d-94a806908619-mountpoint-dir\") pod \"csi-hostpathplugin-4j8zp\" (UID: \"6fd07d3c-3d37-4d33-b14d-94a806908619\") " pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.176788 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/6fd07d3c-3d37-4d33-b14d-94a806908619-socket-dir\") pod \"csi-hostpathplugin-4j8zp\" (UID: \"6fd07d3c-3d37-4d33-b14d-94a806908619\") " pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.176924 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/6fd07d3c-3d37-4d33-b14d-94a806908619-mountpoint-dir\") pod \"csi-hostpathplugin-4j8zp\" (UID: \"6fd07d3c-3d37-4d33-b14d-94a806908619\") " pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.177067 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.177237 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/6fd07d3c-3d37-4d33-b14d-94a806908619-plugins-dir\") pod \"csi-hostpathplugin-4j8zp\" (UID: \"6fd07d3c-3d37-4d33-b14d-94a806908619\") " pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" Jan 22 14:05:24 crc kubenswrapper[5017]: E0122 14:05:24.177366 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:24.677353999 +0000 UTC m=+139.669815857 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.177095 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/6fd07d3c-3d37-4d33-b14d-94a806908619-plugins-dir\") pod \"csi-hostpathplugin-4j8zp\" (UID: \"6fd07d3c-3d37-4d33-b14d-94a806908619\") " pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.177800 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fe2340df-b827-4162-8910-7d38587dee24-cert\") pod \"ingress-canary-r84vw\" (UID: \"fe2340df-b827-4162-8910-7d38587dee24\") " pod="openshift-ingress-canary/ingress-canary-r84vw" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.178240 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5cef9e3e-e4a8-4f72-b8aa-15b8a5f97639-node-bootstrap-token\") pod \"machine-config-server-pdfrn\" (UID: \"5cef9e3e-e4a8-4f72-b8aa-15b8a5f97639\") " pod="openshift-machine-config-operator/machine-config-server-pdfrn" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.178308 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/6fd07d3c-3d37-4d33-b14d-94a806908619-csi-data-dir\") pod \"csi-hostpathplugin-4j8zp\" (UID: \"6fd07d3c-3d37-4d33-b14d-94a806908619\") " pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.178428 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/6fd07d3c-3d37-4d33-b14d-94a806908619-csi-data-dir\") pod \"csi-hostpathplugin-4j8zp\" (UID: \"6fd07d3c-3d37-4d33-b14d-94a806908619\") " pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.178478 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/6fd07d3c-3d37-4d33-b14d-94a806908619-registration-dir\") pod \"csi-hostpathplugin-4j8zp\" (UID: \"6fd07d3c-3d37-4d33-b14d-94a806908619\") " pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.178536 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drkqk\" (UniqueName: \"kubernetes.io/projected/6fd07d3c-3d37-4d33-b14d-94a806908619-kube-api-access-drkqk\") pod \"csi-hostpathplugin-4j8zp\" (UID: \"6fd07d3c-3d37-4d33-b14d-94a806908619\") " pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.178600 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/6fd07d3c-3d37-4d33-b14d-94a806908619-registration-dir\") pod \"csi-hostpathplugin-4j8zp\" (UID: \"6fd07d3c-3d37-4d33-b14d-94a806908619\") " 
pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.179420 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5cef9e3e-e4a8-4f72-b8aa-15b8a5f97639-certs\") pod \"machine-config-server-pdfrn\" (UID: \"5cef9e3e-e4a8-4f72-b8aa-15b8a5f97639\") " pod="openshift-machine-config-operator/machine-config-server-pdfrn" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.181601 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5cef9e3e-e4a8-4f72-b8aa-15b8a5f97639-node-bootstrap-token\") pod \"machine-config-server-pdfrn\" (UID: \"5cef9e3e-e4a8-4f72-b8aa-15b8a5f97639\") " pod="openshift-machine-config-operator/machine-config-server-pdfrn" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.182058 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fe2340df-b827-4162-8910-7d38587dee24-cert\") pod \"ingress-canary-r84vw\" (UID: \"fe2340df-b827-4162-8910-7d38587dee24\") " pod="openshift-ingress-canary/ingress-canary-r84vw" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.200332 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lgg7\" (UniqueName: \"kubernetes.io/projected/25f8bf41-00ef-46f3-bd41-a6a9ff25ab20-kube-api-access-8lgg7\") pod \"packageserver-d55dfcdfc-xhnxk\" (UID: \"25f8bf41-00ef-46f3-bd41-a6a9ff25ab20\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhnxk" Jan 22 14:05:24 crc kubenswrapper[5017]: W0122 14:05:24.207299 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb792244f_2eec_407d_a684_adad1b2ec199.slice/crio-1540de62c14e0a665fbc05c4d0eb1468fff3b7548e24310ac368376f352adb15 WatchSource:0}: Error finding container 1540de62c14e0a665fbc05c4d0eb1468fff3b7548e24310ac368376f352adb15: Status 404 returned error can't find the container with id 1540de62c14e0a665fbc05c4d0eb1468fff3b7548e24310ac368376f352adb15 Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.219050 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxqdz\" (UniqueName: \"kubernetes.io/projected/abfe1f56-9e2c-477c-a6f0-58cbc0513563-kube-api-access-vxqdz\") pod \"olm-operator-6b444d44fb-ctq6h\" (UID: \"abfe1f56-9e2c-477c-a6f0-58cbc0513563\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ctq6h" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.222617 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2lp2r" event={"ID":"b792244f-2eec-407d-a684-adad1b2ec199","Type":"ContainerStarted","Data":"1540de62c14e0a665fbc05c4d0eb1468fff3b7548e24310ac368376f352adb15"} Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.223192 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-r4qhd" event={"ID":"331d69d8-6f0e-418f-baef-1162a103675e","Type":"ContainerStarted","Data":"69422d063308c6f2516e17b0970606b3aa12a978ea76e34e6bef7210ddaa9ffb"} Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.223733 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" 
event={"ID":"b4ffbc55-9c5f-46c3-9c11-e868ad69196b","Type":"ContainerStarted","Data":"a8a0310eebf8b5724b2ec6f42c8b85c27cba9e8adf1d552db10fcb5dc450a397"} Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.224819 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-p4xvc" event={"ID":"2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1","Type":"ContainerStarted","Data":"ce0a4ad67de7ac3b63cf1df1cd98cba479d735380e652e2f6e68ca8739b98098"} Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.226228 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6vm24" event={"ID":"0bc6e378-496f-4a54-946a-99518f0d3570","Type":"ContainerStarted","Data":"6a5a5716c46d679387ea06c54c12f1478d7536d163c99546a1c00316fbd04f9d"} Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.246655 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9k2r\" (UniqueName: \"kubernetes.io/projected/10105fdd-b631-4438-a0c7-308a45f9e11e-kube-api-access-h9k2r\") pod \"router-default-5444994796-tm429\" (UID: \"10105fdd-b631-4438-a0c7-308a45f9e11e\") " pod="openshift-ingress/router-default-5444994796-tm429" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.247077 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k82zx" event={"ID":"9ebc6dfd-7e32-4514-84be-0c290fc2c616","Type":"ContainerStarted","Data":"1f660e52847b5ace6878d472a260b876075842dc4371e489efa7cba06d45ec64"} Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.248345 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-sk2hg" event={"ID":"0e0b63e6-f2e1-4ed1-8501-3d0420964499","Type":"ContainerStarted","Data":"ae58bc3dcbc48559bac44ef786bd100d0f2f67b3e4210e58e4770ad26f9bd737"} Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.249201 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" event={"ID":"382be394-5527-4c07-9600-f70cb1c1ac18","Type":"ContainerStarted","Data":"f5959931cecd3e28b74df4d56a492cc47f9997bb72a46315aeb9a2ab2253c6bf"} Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.271499 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhnxk" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.280203 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:24 crc kubenswrapper[5017]: E0122 14:05:24.280644 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:24.780628489 +0000 UTC m=+139.773090347 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.287100 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-ssjmv" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.288257 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-l57n2"] Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.291654 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wbqbm"] Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.292151 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4497p\" (UniqueName: \"kubernetes.io/projected/fe2340df-b827-4162-8910-7d38587dee24-kube-api-access-4497p\") pod \"ingress-canary-r84vw\" (UID: \"fe2340df-b827-4162-8910-7d38587dee24\") " pod="openshift-ingress-canary/ingress-canary-r84vw" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.293009 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-t47kf"] Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.317063 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-r84vw" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.325203 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8v2x\" (UniqueName: \"kubernetes.io/projected/5cef9e3e-e4a8-4f72-b8aa-15b8a5f97639-kube-api-access-k8v2x\") pod \"machine-config-server-pdfrn\" (UID: \"5cef9e3e-e4a8-4f72-b8aa-15b8a5f97639\") " pod="openshift-machine-config-operator/machine-config-server-pdfrn" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.328475 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gbbvd"] Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.344310 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-l5x2x" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.349970 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drkqk\" (UniqueName: \"kubernetes.io/projected/6fd07d3c-3d37-4d33-b14d-94a806908619-kube-api-access-drkqk\") pod \"csi-hostpathplugin-4j8zp\" (UID: \"6fd07d3c-3d37-4d33-b14d-94a806908619\") " pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.352985 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-j4qnq"] Jan 22 14:05:24 crc kubenswrapper[5017]: W0122 14:05:24.364650 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8a856b4a_e13f_4691_8019_ff1939ba7726.slice/crio-9fbf2d4a3ec0b1a6e50c8eee89499d4419a930e8363134538492e5b3f7faae86 WatchSource:0}: Error finding container 9fbf2d4a3ec0b1a6e50c8eee89499d4419a930e8363134538492e5b3f7faae86: Status 404 returned error can't find the container with id 9fbf2d4a3ec0b1a6e50c8eee89499d4419a930e8363134538492e5b3f7faae86 Jan 22 14:05:24 crc kubenswrapper[5017]: W0122 14:05:24.369697 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod910fc4c2_7723_4380_89d8_9988bb9e53c8.slice/crio-abd211b5cf820a7b045b7136e1d13a7222a1494eeac8c878f3c941dbb3a03d4e WatchSource:0}: Error finding container abd211b5cf820a7b045b7136e1d13a7222a1494eeac8c878f3c941dbb3a03d4e: Status 404 returned error can't find the container with id abd211b5cf820a7b045b7136e1d13a7222a1494eeac8c878f3c941dbb3a03d4e Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.382919 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:24 crc kubenswrapper[5017]: E0122 14:05:24.383420 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:24.883404735 +0000 UTC m=+139.875866593 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.390940 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-tm429" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.392889 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-8ljnf"] Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.450817 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6d76w" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.464080 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7glj"] Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.483358 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:24 crc kubenswrapper[5017]: E0122 14:05:24.483887 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:24.983867835 +0000 UTC m=+139.976329703 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.499374 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ctq6h" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.564869 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xrrlz"] Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.584609 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:24 crc kubenswrapper[5017]: E0122 14:05:24.584917 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:25.084904021 +0000 UTC m=+140.077365869 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.593996 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-pdfrn" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.609083 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.685682 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:24 crc kubenswrapper[5017]: E0122 14:05:24.686201 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:25.186179764 +0000 UTC m=+140.178641622 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.786788 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:24 crc kubenswrapper[5017]: E0122 14:05:24.787510 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:25.287492969 +0000 UTC m=+140.279954827 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:24 crc kubenswrapper[5017]: W0122 14:05:24.872311 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb4b0269e_1d32_4db4_bc9a_185eb9ebcc33.slice/crio-8593a0071ad83236bf472acfddf8246a46d7db516b9bce4d68de7bdaa8d9f0d3 WatchSource:0}: Error finding container 8593a0071ad83236bf472acfddf8246a46d7db516b9bce4d68de7bdaa8d9f0d3: Status 404 returned error can't find the container with id 8593a0071ad83236bf472acfddf8246a46d7db516b9bce4d68de7bdaa8d9f0d3 Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.887569 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:24 crc kubenswrapper[5017]: E0122 14:05:24.888736 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:25.38871831 +0000 UTC m=+140.381180168 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.888849 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:24 crc kubenswrapper[5017]: E0122 14:05:24.889152 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:25.389145403 +0000 UTC m=+140.381607261 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.936730 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484840-hsdps"] Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.972239 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-52hrz"] Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.984042 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gkh55"] Jan 22 14:05:24 crc kubenswrapper[5017]: I0122 14:05:24.989467 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:24 crc kubenswrapper[5017]: E0122 14:05:24.990688 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:25.490672303 +0000 UTC m=+140.483134161 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.073598 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-l5nvn"] Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.093204 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:25 crc kubenswrapper[5017]: E0122 14:05:25.093487 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:25.59347542 +0000 UTC m=+140.585937278 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.194718 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:25 crc kubenswrapper[5017]: E0122 14:05:25.195476 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:25.695459973 +0000 UTC m=+140.687921831 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.299543 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:25 crc kubenswrapper[5017]: E0122 14:05:25.300102 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:25.800083422 +0000 UTC m=+140.792545280 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.356959 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-52hrz" event={"ID":"73bf70af-455b-4c32-8dda-324c3aa3d7b7","Type":"ContainerStarted","Data":"2bc909e11e3194f56470dc9e83fbae03b571be2c22c239d68e47c54cde90b12d"} Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.372345 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" event={"ID":"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8","Type":"ContainerStarted","Data":"3353cd61ea7df5e804ea9877f540548404ae76d5eeee1c5c7860d646cbb12c8a"} Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.376198 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7glj" event={"ID":"ca0f873e-31ca-46ef-b2cf-c0e0a1ce8a3d","Type":"ContainerStarted","Data":"c6998dd0c477c6713d6883ce51733cd956e8cd51a5dfe111474a0412ee0d68eb"} Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.378173 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-t47kf" event={"ID":"1ee65bdd-e09e-4bf1-8025-ec86426073d1","Type":"ContainerStarted","Data":"44eefb6eadea642416a1d89e4904df6067b511ad95aff90df98d790f74cc869a"} Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.378999 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-8ljnf" event={"ID":"e9e7d8d3-3f4f-4573-83e3-d2999cc53aa5","Type":"ContainerStarted","Data":"4e338eef53cdb6773ca8e58fbc685419b612389a8d7af8d002603bea829be397"} Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.381676 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-tm429" event={"ID":"10105fdd-b631-4438-a0c7-308a45f9e11e","Type":"ContainerStarted","Data":"a7bc7d9307d67ad3cd46e8fb21b9873de2c46288de5e9f3aa2b9149aa60ceb5e"} Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.385639 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wbqbm" event={"ID":"8a856b4a-e13f-4691-8019-ff1939ba7726","Type":"ContainerStarted","Data":"9fbf2d4a3ec0b1a6e50c8eee89499d4419a930e8363134538492e5b3f7faae86"} Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.396039 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-j4qnq" event={"ID":"66a013c9-01a4-4027-99ec-185312c81a41","Type":"ContainerStarted","Data":"943861b9602aaab984d2a6abc8d059d47b65f6b804eceaa8fa820836fb58cc09"} Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.400861 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 
14:05:25 crc kubenswrapper[5017]: E0122 14:05:25.401259 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:25.901243132 +0000 UTC m=+140.893704990 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.403151 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5w4zs"] Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.406355 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-v77t5"] Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.421563 5017 generic.go:334] "Generic (PLEG): container finished" podID="382be394-5527-4c07-9600-f70cb1c1ac18" containerID="d38b1af5fafb6832c302941a2ea181d71d871f2a2164adfc8f1233e319887aaa" exitCode=0 Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.421654 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" event={"ID":"382be394-5527-4c07-9600-f70cb1c1ac18","Type":"ContainerDied","Data":"d38b1af5fafb6832c302941a2ea181d71d871f2a2164adfc8f1233e319887aaa"} Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.429247 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-hq6wt"] Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.429309 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7h4hj"] Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.468260 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gkh55" event={"ID":"745626f0-e2c2-4916-b91b-41c204d3a825","Type":"ContainerStarted","Data":"d54c27d27e81b078e956fe70323a22b2da970cea1cd04e1bd86d905c6d3a39d6"} Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.475686 5017 generic.go:334] "Generic (PLEG): container finished" podID="b4ffbc55-9c5f-46c3-9c11-e868ad69196b" containerID="b6b615102e693798e387714698dc3e9cc9cb507c02ece3644b0ee704855becbb" exitCode=0 Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.475745 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" event={"ID":"b4ffbc55-9c5f-46c3-9c11-e868ad69196b","Type":"ContainerDied","Data":"b6b615102e693798e387714698dc3e9cc9cb507c02ece3644b0ee704855becbb"} Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.491176 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nlphk"] Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.506408 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-dpdfj"] Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 
14:05:25.508176 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:25 crc kubenswrapper[5017]: E0122 14:05:25.508914 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:26.008901968 +0000 UTC m=+141.001363826 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.515629 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xtlnn"] Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.520194 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-hsdps" event={"ID":"188093c1-77b0-4e37-a53b-f3974ad22cff","Type":"ContainerStarted","Data":"ba9ddced3ad778434f24148b69a32fb5a41cf53c64996f6f9a41dd6dbd7eafa1"} Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.541224 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-r84vw"] Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.555350 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-g77fg"] Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.561953 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-66rfh" event={"ID":"58666604-0909-4502-a493-94f59c37556b","Type":"ContainerStarted","Data":"9dfc25e5e4337f9f0118a3a0a0a40aacc9b6303c2b7ce5914f3cc0103a447aae"} Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.625421 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:25 crc kubenswrapper[5017]: E0122 14:05:25.625876 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:26.125858981 +0000 UTC m=+141.118320839 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.632962 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-htpml"] Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.652785 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-pdfrn" event={"ID":"5cef9e3e-e4a8-4f72-b8aa-15b8a5f97639","Type":"ContainerStarted","Data":"9929111eb0b141df487fad3a4a5ae242e3c63fda6221021f74ae37f6687f993a"} Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.686487 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" event={"ID":"1a853328-872d-4a4e-98d1-681be7eb3603","Type":"ContainerStarted","Data":"d4fe4ac8eb6d48f705da75faab75aba7b850cb548a5005581e2c92192048eaf0"} Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.715898 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7pt94"] Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.717599 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.724761 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-sk2hg" event={"ID":"0e0b63e6-f2e1-4ed1-8501-3d0420964499","Type":"ContainerStarted","Data":"df027b665d334cabbde611d03751f11d53a2c0ddb7e54f303c8ec4cb66160c18"} Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.725381 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-sk2hg" Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.726775 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:25 crc kubenswrapper[5017]: E0122 14:05:25.727196 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:26.227182996 +0000 UTC m=+141.219644854 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.735824 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-l57n2" event={"ID":"910fc4c2-7723-4380-89d8-9988bb9e53c8","Type":"ContainerStarted","Data":"abd211b5cf820a7b045b7136e1d13a7222a1494eeac8c878f3c941dbb3a03d4e"} Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.738979 5017 patch_prober.go:28] interesting pod/downloads-7954f5f757-sk2hg container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.739063 5017 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-sk2hg" podUID="0e0b63e6-f2e1-4ed1-8501-3d0420964499" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.739795 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k82zx" event={"ID":"9ebc6dfd-7e32-4514-84be-0c290fc2c616","Type":"ContainerStarted","Data":"3db80271604988818926f2b8f04041ea745069a4824522b84b3cd827f64a3b9f"} Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.744888 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.748826 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6vm24" event={"ID":"0bc6e378-496f-4a54-946a-99518f0d3570","Type":"ContainerStarted","Data":"6fd4352013e9f2f56da59ac779aa0942db0d23ea2e0d46a8c7de40e73b3eae17"} Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.777106 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xrrlz" event={"ID":"b4b0269e-1d32-4db4-bc9a-185eb9ebcc33","Type":"ContainerStarted","Data":"8593a0071ad83236bf472acfddf8246a46d7db516b9bce4d68de7bdaa8d9f0d3"} Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.787484 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5nvn" event={"ID":"be128e90-65d6-43a9-9714-a031f24f23c4","Type":"ContainerStarted","Data":"1c66d8940bb85932768d93aef76fd8dede6dd82303d29773bd8ab3006355734d"} Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.793059 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhnxk"] Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.821825 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-6d76w"] Jan 
22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.827545 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.829234 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-4j8zp"] Jan 22 14:05:25 crc kubenswrapper[5017]: E0122 14:05:25.829858 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:26.329839188 +0000 UTC m=+141.322301046 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.876258 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-l5x2x"] Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.877920 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ctq6h"] Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.888668 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-ssjmv"] Jan 22 14:05:25 crc kubenswrapper[5017]: I0122 14:05:25.930579 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:25 crc kubenswrapper[5017]: E0122 14:05:25.930971 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:26.430958917 +0000 UTC m=+141.423420775 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:25 crc kubenswrapper[5017]: W0122 14:05:25.970131 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6fd07d3c_3d37_4d33_b14d_94a806908619.slice/crio-b7a93803062499d6e5edc3558eaa3df6a47780b3acb10dc8dbc271c127388bb0 WatchSource:0}: Error finding container b7a93803062499d6e5edc3558eaa3df6a47780b3acb10dc8dbc271c127388bb0: Status 404 returned error can't find the container with id b7a93803062499d6e5edc3558eaa3df6a47780b3acb10dc8dbc271c127388bb0 Jan 22 14:05:26 crc kubenswrapper[5017]: I0122 14:05:26.031612 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:26 crc kubenswrapper[5017]: E0122 14:05:26.032980 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:26.532787876 +0000 UTC m=+141.525249734 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:26 crc kubenswrapper[5017]: W0122 14:05:26.043725 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod87792bd4_454a_40fb_96fc_bf89d0aac093.slice/crio-0ad31e41a9b65174ce60d12d5818d889802e6dca819fdbe1829b80eb3aabb538 WatchSource:0}: Error finding container 0ad31e41a9b65174ce60d12d5818d889802e6dca819fdbe1829b80eb3aabb538: Status 404 returned error can't find the container with id 0ad31e41a9b65174ce60d12d5818d889802e6dca819fdbe1829b80eb3aabb538 Jan 22 14:05:26 crc kubenswrapper[5017]: E0122 14:05:26.049588 5017 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod331d69d8_6f0e_418f_baef_1162a103675e.slice/crio-conmon-925bbef4f887d813020cd070b2152e29086de236d4141eb63301e3c32dff10b4.scope\": RecentStats: unable to find data in memory cache]" Jan 22 14:05:26 crc kubenswrapper[5017]: I0122 14:05:26.134554 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:26 crc kubenswrapper[5017]: E0122 14:05:26.136108 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:26.636092107 +0000 UTC m=+141.628553955 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:26 crc kubenswrapper[5017]: I0122 14:05:26.237653 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:26 crc kubenswrapper[5017]: E0122 14:05:26.237975 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:26.737958277 +0000 UTC m=+141.730420125 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:26 crc kubenswrapper[5017]: I0122 14:05:26.338909 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:26 crc kubenswrapper[5017]: E0122 14:05:26.339323 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:26.839308183 +0000 UTC m=+141.831770041 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:26 crc kubenswrapper[5017]: I0122 14:05:26.411490 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-l57n2" podStartSLOduration=121.411476512 podStartE2EDuration="2m1.411476512s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:26.409841225 +0000 UTC m=+141.402303083" watchObservedRunningTime="2026-01-22 14:05:26.411476512 +0000 UTC m=+141.403938370" Jan 22 14:05:26 crc kubenswrapper[5017]: I0122 14:05:26.436752 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-sk2hg" podStartSLOduration=121.436736966 podStartE2EDuration="2m1.436736966s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:26.435718386 +0000 UTC m=+141.428180244" watchObservedRunningTime="2026-01-22 14:05:26.436736966 +0000 UTC m=+141.429198814" Jan 22 14:05:26 crc kubenswrapper[5017]: I0122 14:05:26.440445 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:26 crc kubenswrapper[5017]: E0122 14:05:26.440915 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:26.940897955 +0000 UTC m=+141.933359813 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:26 crc kubenswrapper[5017]: I0122 14:05:26.471904 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k82zx" podStartSLOduration=121.471884893 podStartE2EDuration="2m1.471884893s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:26.47071315 +0000 UTC m=+141.463175008" watchObservedRunningTime="2026-01-22 14:05:26.471884893 +0000 UTC m=+141.464346751" Jan 22 14:05:26 crc kubenswrapper[5017]: I0122 14:05:26.494390 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" podStartSLOduration=121.494370418 podStartE2EDuration="2m1.494370418s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:26.493455432 +0000 UTC m=+141.485917290" watchObservedRunningTime="2026-01-22 14:05:26.494370418 +0000 UTC m=+141.486832276" Jan 22 14:05:26 crc kubenswrapper[5017]: I0122 14:05:26.543014 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:26 crc kubenswrapper[5017]: E0122 14:05:26.543486 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:27.043470485 +0000 UTC m=+142.035932343 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:26 crc kubenswrapper[5017]: I0122 14:05:26.652622 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:26 crc kubenswrapper[5017]: E0122 14:05:26.652831 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:27.152801969 +0000 UTC m=+142.145263837 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:26 crc kubenswrapper[5017]: I0122 14:05:26.653246 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:26 crc kubenswrapper[5017]: E0122 14:05:26.653588 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:27.153577032 +0000 UTC m=+142.146038890 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:26 crc kubenswrapper[5017]: I0122 14:05:26.754934 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:26 crc kubenswrapper[5017]: E0122 14:05:26.755748 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:27.25572685 +0000 UTC m=+142.248188708 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:26 crc kubenswrapper[5017]: I0122 14:05:26.859492 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:26 crc kubenswrapper[5017]: E0122 14:05:26.865985 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:27.36596426 +0000 UTC m=+142.358426118 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:26 crc kubenswrapper[5017]: I0122 14:05:26.893213 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-t47kf" event={"ID":"1ee65bdd-e09e-4bf1-8025-ec86426073d1","Type":"ContainerStarted","Data":"c26ad76492ee0f027fae5dbf6ac53216c84e37c0d061e2e8429043d86f233c56"} Jan 22 14:05:26 crc kubenswrapper[5017]: I0122 14:05:26.898270 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5w4zs" event={"ID":"90b0ba9d-f818-4059-a895-fe9947511987","Type":"ContainerStarted","Data":"db571e783d20d80662352a549473964d87c97d067e996eb826a3a3548e51b149"} Jan 22 14:05:26 crc kubenswrapper[5017]: I0122 14:05:26.898321 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5w4zs" event={"ID":"90b0ba9d-f818-4059-a895-fe9947511987","Type":"ContainerStarted","Data":"ac8eaf21d0df661e6b882c97f80e68eec3f88dd6337ee8d911fea774709a656c"} Jan 22 14:05:26 crc kubenswrapper[5017]: I0122 14:05:26.941259 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5w4zs" podStartSLOduration=121.941241757 podStartE2EDuration="2m1.941241757s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:26.93960035 +0000 UTC m=+141.932062208" watchObservedRunningTime="2026-01-22 14:05:26.941241757 +0000 UTC m=+141.933703615" Jan 22 14:05:26 crc kubenswrapper[5017]: I0122 14:05:26.955149 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-hsdps" event={"ID":"188093c1-77b0-4e37-a53b-f3974ad22cff","Type":"ContainerStarted","Data":"263a3191ddc74a5769fc99a7adfca427b4ad827a2f55b6aeccd7af7a3ca297ff"} Jan 22 14:05:26 crc kubenswrapper[5017]: I0122 14:05:26.960461 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:26 crc kubenswrapper[5017]: E0122 14:05:26.961731 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:27.461712123 +0000 UTC m=+142.454173981 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.073654 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:27 crc kubenswrapper[5017]: E0122 14:05:27.075105 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:27.575088844 +0000 UTC m=+142.567550802 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.129102 5017 csr.go:261] certificate signing request csr-k92j7 is approved, waiting to be issued Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.136503 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6vm24" event={"ID":"0bc6e378-496f-4a54-946a-99518f0d3570","Type":"ContainerStarted","Data":"2f7e8df2cd06abf0b8c9f88dd0b77f12d48f2286a032666a5b7daa1b688ebe41"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.161578 5017 csr.go:257] certificate signing request csr-k92j7 is issued Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.178048 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:27 crc kubenswrapper[5017]: E0122 14:05:27.178944 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:27.67892906 +0000 UTC m=+142.671390918 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.185665 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-6vm24" podStartSLOduration=122.185639563 podStartE2EDuration="2m2.185639563s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:27.177764127 +0000 UTC m=+142.170225985" watchObservedRunningTime="2026-01-22 14:05:27.185639563 +0000 UTC m=+142.178101421" Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.188551 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-hsdps" podStartSLOduration=122.188542036 podStartE2EDuration="2m2.188542036s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:27.011711107 +0000 UTC m=+142.004172965" watchObservedRunningTime="2026-01-22 14:05:27.188542036 +0000 UTC m=+142.181003894" Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.202445 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhnxk" event={"ID":"25f8bf41-00ef-46f3-bd41-a6a9ff25ab20","Type":"ContainerStarted","Data":"78d7930a92e69fca0e850aafbe24c08636bcd3efed910db93a69c36285bdfe38"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.209404 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-66rfh" event={"ID":"58666604-0909-4502-a493-94f59c37556b","Type":"ContainerStarted","Data":"1d7fa2374df5aec94d87ff30919cce619450083b43e65ed7c1979ae731b8fd4a"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.211383 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nlphk" event={"ID":"7d76ed93-3c40-4912-bbd2-ec4af6cefcc1","Type":"ContainerStarted","Data":"2215d44903961ef6afd5b23ed41531232ad8a439fff5904a150fdbb152fe2fe9"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.211434 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nlphk" event={"ID":"7d76ed93-3c40-4912-bbd2-ec4af6cefcc1","Type":"ContainerStarted","Data":"187e15eb29e6dc6bb8dde803fa350e4796058c8809c44c75b5ce86c782abf6d6"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.213481 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nlphk" Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.242126 5017 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-nlphk container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 
10.217.0.33:8443: connect: connection refused" start-of-body= Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.242503 5017 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nlphk" podUID="7d76ed93-3c40-4912-bbd2-ec4af6cefcc1" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.281550 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:27 crc kubenswrapper[5017]: E0122 14:05:27.282384 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:27.782360595 +0000 UTC m=+142.774822453 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.353522 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-l57n2" event={"ID":"910fc4c2-7723-4380-89d8-9988bb9e53c8","Type":"ContainerStarted","Data":"7f7395d290335e0f2865244cb99ce103407067d0d9cab5b23e2284d3691325c1"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.353603 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-hq6wt" event={"ID":"99574a1d-fcb0-4fac-a62c-1f58e9cc1a1f","Type":"ContainerStarted","Data":"34a6e501574e335943d378a9b209d1801c3742cb75c7ceb8bbeab57ea1e00f0c"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.383567 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:27 crc kubenswrapper[5017]: E0122 14:05:27.383809 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:27.883758642 +0000 UTC m=+142.876220500 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.384165 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:27 crc kubenswrapper[5017]: E0122 14:05:27.384589 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:27.884571995 +0000 UTC m=+142.877033853 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.421347 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-htpml" event={"ID":"96d2a4db-9f82-40e1-90fd-4f9ef3927f39","Type":"ContainerStarted","Data":"3c092b1fc69345a987069288f5003ebc8d797fce0822de28f2173736ead615b0"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.461734 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5nvn" event={"ID":"be128e90-65d6-43a9-9714-a031f24f23c4","Type":"ContainerStarted","Data":"137ed3eea0d376b303398b2c53e6d600f4e127eccd29ae11c24266646ff072e2"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.466271 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" event={"ID":"ae146171-4a21-45b6-8484-66fbdb597a5e","Type":"ContainerStarted","Data":"0e7b0c03bb68c83a1e27510454b72ceed213abc2935420101ca8f4a2374112c5"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.496244 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:27 crc kubenswrapper[5017]: E0122 14:05:27.496848 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:27.996814633 +0000 UTC m=+142.989276491 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.515224 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gkh55" event={"ID":"745626f0-e2c2-4916-b91b-41c204d3a825","Type":"ContainerStarted","Data":"61b399645f69f2206f9dfeb39e5afe41330e0e19d4d8e4d4bbb956d3d49cfbc4"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.539513 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-tm429" event={"ID":"10105fdd-b631-4438-a0c7-308a45f9e11e","Type":"ContainerStarted","Data":"be806cfb898525904a6b871250b9eb8081412aff971797a467cc3d901b949e7a"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.563635 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ctq6h" event={"ID":"abfe1f56-9e2c-477c-a6f0-58cbc0513563","Type":"ContainerStarted","Data":"a820ce1ee45f7d724d7051dd8828ef56e4fc0bfe7cc6b0eace71d8a7b951c29f"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.597567 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nlphk" podStartSLOduration=122.59754035 podStartE2EDuration="2m2.59754035s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:27.257877833 +0000 UTC m=+142.250339701" watchObservedRunningTime="2026-01-22 14:05:27.59754035 +0000 UTC m=+142.590002208" Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.598090 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gkh55" podStartSLOduration=122.598080706 podStartE2EDuration="2m2.598080706s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:27.581719907 +0000 UTC m=+142.574181765" watchObservedRunningTime="2026-01-22 14:05:27.598080706 +0000 UTC m=+142.590542564" Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.602475 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:27 crc kubenswrapper[5017]: E0122 14:05:27.604664 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:28.104648894 +0000 UTC m=+143.097110752 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.638583 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-v77t5" event={"ID":"d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7","Type":"ContainerStarted","Data":"688e7b10ac31a72a68f5fe379f87139e73e708f94d9ca8aa9a8e6fd154551d76"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.638650 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-v77t5" event={"ID":"d6cf5eb5-b551-4a45-a2c6-7a0c081fc8c7","Type":"ContainerStarted","Data":"dc1567ab9210df7e0fd1c1d665b5f3a85cf56efbcd0b622397d02692a14eaa00"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.686098 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-tm429" podStartSLOduration=122.686081248 podStartE2EDuration="2m2.686081248s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:27.684945476 +0000 UTC m=+142.677407334" watchObservedRunningTime="2026-01-22 14:05:27.686081248 +0000 UTC m=+142.678543106" Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.705685 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:27 crc kubenswrapper[5017]: E0122 14:05:27.705879 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:28.205854195 +0000 UTC m=+143.198316063 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.706358 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:27 crc kubenswrapper[5017]: E0122 14:05:27.706946 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:28.206933876 +0000 UTC m=+143.199395734 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.738672 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" event={"ID":"382be394-5527-4c07-9600-f70cb1c1ac18","Type":"ContainerStarted","Data":"1f791842ebcf6b958ce2726cd531e098032bc994c5cbcb14fe0544ae8c4702e9"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.757969 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-v77t5" podStartSLOduration=122.757945918 podStartE2EDuration="2m2.757945918s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:27.739194001 +0000 UTC m=+142.731655859" watchObservedRunningTime="2026-01-22 14:05:27.757945918 +0000 UTC m=+142.750407776" Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.771424 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xtlnn" event={"ID":"44587741-560f-4a80-8497-4034c8f5c80a","Type":"ContainerStarted","Data":"fedabf08f84f96b8d56bc3f519de6052d25e65bd3dc6840f40d4d623f656e38c"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.771474 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xtlnn" event={"ID":"44587741-560f-4a80-8497-4034c8f5c80a","Type":"ContainerStarted","Data":"eb597cd9b23ddab635af3c0d28ee2f4591c8f0240d3bb223761d1c81cad5260a"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.773385 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-pdfrn" 
event={"ID":"5cef9e3e-e4a8-4f72-b8aa-15b8a5f97639","Type":"ContainerStarted","Data":"bcc67db97112b36e560ff498d02fb4c58c63f83cbbfb0eb84d9ea6e02b3188af"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.787693 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-r84vw" event={"ID":"fe2340df-b827-4162-8910-7d38587dee24","Type":"ContainerStarted","Data":"1f416305b729200a3f24883e2f18118d82eaa3e1933b9c83f918fd34314e49d5"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.810030 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:27 crc kubenswrapper[5017]: E0122 14:05:27.811129 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:28.311095212 +0000 UTC m=+143.303557070 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.828423 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-j4qnq" event={"ID":"66a013c9-01a4-4027-99ec-185312c81a41","Type":"ContainerStarted","Data":"f6efecb73e32e1ac970b6891877e2fed057d48569e6782d1aff86ee6c0901e06"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.861474 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-p4xvc" event={"ID":"2096a6e4-2ef6-410b-b7e5-e26c11f4ebb1","Type":"ContainerStarted","Data":"9802723d05702c3d06d1e3d472fc3bd16b189c6578a7f435d9fc7e34ae6727a0"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.862367 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-p4xvc" Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.898389 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-dpdfj" event={"ID":"239e0484-3bc8-4628-ba87-d550ce2abb13","Type":"ContainerStarted","Data":"989d62be7a7a4a8eb0874ab84a8d04c38f33cb842c0081850abfaadb5f0f0a78"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.899850 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-p4xvc" Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.911821 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:27 crc kubenswrapper[5017]: E0122 14:05:27.917942 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:28.417922834 +0000 UTC m=+143.410384682 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.918472 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" podStartSLOduration=122.918455089 podStartE2EDuration="2m2.918455089s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:27.841270947 +0000 UTC m=+142.833732805" watchObservedRunningTime="2026-01-22 14:05:27.918455089 +0000 UTC m=+142.910916947" Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.920387 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-pdfrn" podStartSLOduration=6.920370694 podStartE2EDuration="6.920370694s" podCreationTimestamp="2026-01-22 14:05:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:27.918959784 +0000 UTC m=+142.911421642" watchObservedRunningTime="2026-01-22 14:05:27.920370694 +0000 UTC m=+142.912832562" Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.932395 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wbqbm" event={"ID":"8a856b4a-e13f-4691-8019-ff1939ba7726","Type":"ContainerStarted","Data":"5fb1809cbee3437dd3ab9c3d6d05a59c618065cc2e333ffee0bbcd8ed7714c41"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.947185 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-ssjmv" event={"ID":"9cf1f133-a18d-4beb-952c-24b3d6530c8f","Type":"ContainerStarted","Data":"d70ec4763a0394d2d91d8aa825441f98026a1ed953162403e27fe2d8fae107a2"} Jan 22 14:05:27 crc kubenswrapper[5017]: I0122 14:05:27.968371 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-g77fg" event={"ID":"ff503dda-7e52-4cb1-920f-bea378ae7a10","Type":"ContainerStarted","Data":"9db4bde9c7d5516e103b78ca7e9967a99e6cbff47dbce1dac1ef294ea7a4bb5a"} Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.008644 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-8ljnf" event={"ID":"e9e7d8d3-3f4f-4573-83e3-d2999cc53aa5","Type":"ContainerStarted","Data":"d01aa4f0d988d79a5b6440466fdeb55170a8796f7b3dc6254c630e925aeaed28"} Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.009947 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-ingress-canary/ingress-canary-r84vw" podStartSLOduration=7.009925931 podStartE2EDuration="7.009925931s" podCreationTimestamp="2026-01-22 14:05:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:28.002793737 +0000 UTC m=+142.995255585" watchObservedRunningTime="2026-01-22 14:05:28.009925931 +0000 UTC m=+143.002387789" Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.015644 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:28 crc kubenswrapper[5017]: E0122 14:05:28.025213 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:28.525179619 +0000 UTC m=+143.517641487 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.084703 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-52hrz" event={"ID":"73bf70af-455b-4c32-8dda-324c3aa3d7b7","Type":"ContainerStarted","Data":"1930ffc11b0aa0f2f256b4f00f7592ee3590de5f8a40f2835e67e471e3a5ce29"} Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.123299 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:28 crc kubenswrapper[5017]: E0122 14:05:28.123558 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:28.623545258 +0000 UTC m=+143.616007126 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.143454 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" event={"ID":"faf7a532-753d-4862-812c-3ef174c75f81","Type":"ContainerStarted","Data":"f6da17dc2842694a94757d84e60b90264ef8ccdbc285d97eadd9687b3ff219b9"} Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.143502 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" event={"ID":"faf7a532-753d-4862-812c-3ef174c75f81","Type":"ContainerStarted","Data":"fbc8a82c622ea9d02a9b90152d12d1a97189b0f71ae2d002919b398f60d535fc"} Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.144414 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.158402 5017 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-7h4hj container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/healthz\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.159023 5017 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" podUID="faf7a532-753d-4862-812c-3ef174c75f81" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.18:8080/healthz\": dial tcp 10.217.0.18:8080: connect: connection refused" Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.171690 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-01-22 14:00:27 +0000 UTC, rotation deadline is 2026-11-16 16:42:43.618711034 +0000 UTC Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.181234 5017 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 7154h37m15.437495942s for next certificate rotation Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.175433 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xrrlz" event={"ID":"b4b0269e-1d32-4db4-bc9a-185eb9ebcc33","Type":"ContainerStarted","Data":"b7ba8e3591a85105f3c4cbbb8506e7e243040dd6d6092234f0b82ba5cb8589b0"} Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.186920 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xtlnn" podStartSLOduration=123.186894494 podStartE2EDuration="2m3.186894494s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:28.122342384 +0000 UTC m=+143.114804252" watchObservedRunningTime="2026-01-22 14:05:28.186894494 +0000 UTC m=+143.179356362" Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 
14:05:28.189551 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.190035 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.208625 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2lp2r" event={"ID":"b792244f-2eec-407d-a684-adad1b2ec199","Type":"ContainerStarted","Data":"96c6e9bf9980ae537fe475c699857940c05b128d6c4542f4cd634da0d429fd54"} Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.212515 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-l5x2x" event={"ID":"87792bd4-454a-40fb-96fc-bf89d0aac093","Type":"ContainerStarted","Data":"0ad31e41a9b65174ce60d12d5818d889802e6dca819fdbe1829b80eb3aabb538"} Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.215518 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" event={"ID":"6fd07d3c-3d37-4d33-b14d-94a806908619","Type":"ContainerStarted","Data":"b7a93803062499d6e5edc3558eaa3df6a47780b3acb10dc8dbc271c127388bb0"} Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.222675 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" event={"ID":"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8","Type":"ContainerStarted","Data":"655ad5d27f6930521fa0a5421202b7b12e08666570f545cec10387040c9476c1"} Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.223814 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.225921 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:28 crc kubenswrapper[5017]: E0122 14:05:28.227398 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:28.727382585 +0000 UTC m=+143.719844443 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.240840 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.254951 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-j4qnq" podStartSLOduration=123.254929615 podStartE2EDuration="2m3.254929615s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:28.193972427 +0000 UTC m=+143.186434285" watchObservedRunningTime="2026-01-22 14:05:28.254929615 +0000 UTC m=+143.247391473" Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.256508 5017 generic.go:334] "Generic (PLEG): container finished" podID="331d69d8-6f0e-418f-baef-1162a103675e" containerID="925bbef4f887d813020cd070b2152e29086de236d4141eb63301e3c32dff10b4" exitCode=0 Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.256567 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-r4qhd" event={"ID":"331d69d8-6f0e-418f-baef-1162a103675e","Type":"ContainerDied","Data":"925bbef4f887d813020cd070b2152e29086de236d4141eb63301e3c32dff10b4"} Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.263970 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.293805 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-52hrz" podStartSLOduration=123.293786409 podStartE2EDuration="2m3.293786409s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:28.257339944 +0000 UTC m=+143.249801812" watchObservedRunningTime="2026-01-22 14:05:28.293786409 +0000 UTC m=+143.286248277" Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.306623 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6d76w" event={"ID":"82ca4b25-fb05-4542-814f-9a9cc19662d5","Type":"ContainerStarted","Data":"b3cd3d9e24a0246c69dcc650cacf0ffc6f048c879e8b46af95f911f075bd0629"} Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.328564 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7glj" event={"ID":"ca0f873e-31ca-46ef-b2cf-c0e0a1ce8a3d","Type":"ContainerStarted","Data":"d5f1631412030d590c3e9a79df0abbbbcc3a2462e4c66b3f3d37e6c0fb919b30"} Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.328855 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:28 crc kubenswrapper[5017]: E0122 14:05:28.329726 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:28.829707718 +0000 UTC m=+143.822169566 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.330051 5017 patch_prober.go:28] interesting pod/downloads-7954f5f757-sk2hg container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.330356 5017 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-sk2hg" podUID="0e0b63e6-f2e1-4ed1-8501-3d0420964499" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.337469 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-p4xvc" podStartSLOduration=123.33745305 podStartE2EDuration="2m3.33745305s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:28.293174601 +0000 UTC m=+143.285636459" watchObservedRunningTime="2026-01-22 14:05:28.33745305 +0000 UTC m=+143.329914908" Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.393151 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-tm429" Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.407350 5017 patch_prober.go:28] interesting pod/router-default-5444994796-tm429 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 14:05:28 crc kubenswrapper[5017]: [-]has-synced failed: reason withheld Jan 22 14:05:28 crc kubenswrapper[5017]: [+]process-running ok Jan 22 14:05:28 crc kubenswrapper[5017]: healthz check failed Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.407740 5017 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tm429" podUID="10105fdd-b631-4438-a0c7-308a45f9e11e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.429775 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.431461 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-dpdfj" podStartSLOduration=123.431442575 podStartE2EDuration="2m3.431442575s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:28.34755185 +0000 UTC m=+143.340013708" watchObservedRunningTime="2026-01-22 14:05:28.431442575 +0000 UTC m=+143.423904433" Jan 22 14:05:28 crc kubenswrapper[5017]: E0122 14:05:28.432785 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:28.932769253 +0000 UTC m=+143.925231111 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.435717 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wbqbm" podStartSLOduration=123.435700957 podStartE2EDuration="2m3.435700957s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:28.432653479 +0000 UTC m=+143.425115327" watchObservedRunningTime="2026-01-22 14:05:28.435700957 +0000 UTC m=+143.428162815" Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.524308 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" podStartSLOduration=123.524287066 podStartE2EDuration="2m3.524287066s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:28.463105892 +0000 UTC m=+143.455567750" watchObservedRunningTime="2026-01-22 14:05:28.524287066 +0000 UTC m=+143.516748924" Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.538988 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:28 crc kubenswrapper[5017]: E0122 14:05:28.539693 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:29.039681877 +0000 UTC m=+144.032143735 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.567140 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-8ljnf" podStartSLOduration=123.567101453 podStartE2EDuration="2m3.567101453s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:28.526542351 +0000 UTC m=+143.519004209" watchObservedRunningTime="2026-01-22 14:05:28.567101453 +0000 UTC m=+143.559563311" Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.567813 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xrrlz" podStartSLOduration=123.567808194 podStartE2EDuration="2m3.567808194s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:28.560535075 +0000 UTC m=+143.552996933" watchObservedRunningTime="2026-01-22 14:05:28.567808194 +0000 UTC m=+143.560270052" Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.614476 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7glj" podStartSLOduration=123.614454711 podStartE2EDuration="2m3.614454711s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:28.612469814 +0000 UTC m=+143.604931682" watchObservedRunningTime="2026-01-22 14:05:28.614454711 +0000 UTC m=+143.606916569" Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.664308 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:28 crc kubenswrapper[5017]: E0122 14:05:28.664763 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:29.164743112 +0000 UTC m=+144.157204970 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.729619 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" podStartSLOduration=123.729597311 podStartE2EDuration="2m3.729597311s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:28.727248104 +0000 UTC m=+143.719709962" watchObservedRunningTime="2026-01-22 14:05:28.729597311 +0000 UTC m=+143.722059169" Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.772974 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:28 crc kubenswrapper[5017]: E0122 14:05:28.773354 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:29.273339045 +0000 UTC m=+144.265800903 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.783896 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2lp2r" podStartSLOduration=123.783856967 podStartE2EDuration="2m3.783856967s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:28.769916647 +0000 UTC m=+143.762378505" watchObservedRunningTime="2026-01-22 14:05:28.783856967 +0000 UTC m=+143.776318825" Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.873515 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:28 crc kubenswrapper[5017]: E0122 14:05:28.874143 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:29.374127395 +0000 UTC m=+144.366589253 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:28 crc kubenswrapper[5017]: I0122 14:05:28.975746 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:28 crc kubenswrapper[5017]: E0122 14:05:28.976292 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:29.476272183 +0000 UTC m=+144.468734041 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.077740 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:29 crc kubenswrapper[5017]: E0122 14:05:29.078057 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:29.57803968 +0000 UTC m=+144.570501538 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.179148 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:29 crc kubenswrapper[5017]: E0122 14:05:29.180038 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:29.680026233 +0000 UTC m=+144.672488091 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.280735 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:29 crc kubenswrapper[5017]: E0122 14:05:29.280874 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:29.780854584 +0000 UTC m=+144.773316452 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.281427 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:29 crc kubenswrapper[5017]: E0122 14:05:29.281770 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:29.78176158 +0000 UTC m=+144.774223438 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.333455 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-dpdfj" event={"ID":"239e0484-3bc8-4628-ba87-d550ce2abb13","Type":"ContainerStarted","Data":"29bc4e483fe51cd143a07282b6a9152f54e9868d667f0dab5142c194663b6361"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.335410 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-g77fg" event={"ID":"ff503dda-7e52-4cb1-920f-bea378ae7a10","Type":"ContainerStarted","Data":"b93ee81d03a4f7b1126418e0f95ef5b3cf04a3b2b11034cdd4e8dc1c61765c83"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.337331 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-htpml" event={"ID":"96d2a4db-9f82-40e1-90fd-4f9ef3927f39","Type":"ContainerStarted","Data":"b2bb979d1bc48874e574529e34b529cccbfe32d35b537b9f593498b22e3cfa09"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.337387 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-htpml" event={"ID":"96d2a4db-9f82-40e1-90fd-4f9ef3927f39","Type":"ContainerStarted","Data":"5cae45f14bc69b66c07ae346bfb72f256dac1bc1f5e97c52559d01561758bc2b"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.365954 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-r4qhd" event={"ID":"331d69d8-6f0e-418f-baef-1162a103675e","Type":"ContainerStarted","Data":"e918b33fb2e122f8121edbe8cc05586d75b301da101a0ced55370302f20b7887"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.366434 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-r4qhd" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.370311 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-t47kf" event={"ID":"1ee65bdd-e09e-4bf1-8025-ec86426073d1","Type":"ContainerStarted","Data":"1bfc7df970881f23b630618a08e382bfbf2ab1286c7f68e5dba22a0a5934f6dc"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.371966 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" event={"ID":"6fd07d3c-3d37-4d33-b14d-94a806908619","Type":"ContainerStarted","Data":"a8c448ebd174a414b322f44497c6d14a5e324fcf646c27fd8b39333b69e0aec8"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.374194 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" event={"ID":"b4ffbc55-9c5f-46c3-9c11-e868ad69196b","Type":"ContainerStarted","Data":"6b7e4d278aa7a40695847bd19b316153c52d2d99f530c81074270981a5a62259"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.374349 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" event={"ID":"b4ffbc55-9c5f-46c3-9c11-e868ad69196b","Type":"ContainerStarted","Data":"7943341ea2922c7b18c583f3287dd379c720158ff07302ad8eb23e9a8948b88b"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.377894 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhnxk" event={"ID":"25f8bf41-00ef-46f3-bd41-a6a9ff25ab20","Type":"ContainerStarted","Data":"59e5efb0a790fddf9435ecf268bcc0d5a0d01f047723711c206fce7e74c93d4d"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.378385 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhnxk" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.380547 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-g77fg" podStartSLOduration=124.380534591 podStartE2EDuration="2m4.380534591s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:29.377871935 +0000 UTC m=+144.370333793" watchObservedRunningTime="2026-01-22 14:05:29.380534591 +0000 UTC m=+144.372996449" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.382711 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:29 crc kubenswrapper[5017]: E0122 14:05:29.383366 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:29.883345592 +0000 UTC m=+144.875807450 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.383825 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-hq6wt" event={"ID":"99574a1d-fcb0-4fac-a62c-1f58e9cc1a1f","Type":"ContainerStarted","Data":"89887a4cee42d3acf2ec63195b4e86e6b02ea5dd56d88b8999ab6431a9412cef"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.383889 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-hq6wt" event={"ID":"99574a1d-fcb0-4fac-a62c-1f58e9cc1a1f","Type":"ContainerStarted","Data":"9ce97cbf36e2a9e651c4eba3a2d0d007676574e5adeda129ddd4c147eda63ff9"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.387817 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ctq6h" event={"ID":"abfe1f56-9e2c-477c-a6f0-58cbc0513563","Type":"ContainerStarted","Data":"65a0872326387deb2de294cfe92bc98df8a32ff65fa5c37477f4e6c241d9f078"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.388616 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ctq6h" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.390527 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2lp2r" event={"ID":"b792244f-2eec-407d-a684-adad1b2ec199","Type":"ContainerStarted","Data":"1c24d4ef6078064946cd91a588ebe67707112414f7bb91cdd274ade1da39def5"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.394314 5017 patch_prober.go:28] interesting pod/router-default-5444994796-tm429 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 14:05:29 crc kubenswrapper[5017]: [-]has-synced failed: reason withheld Jan 22 14:05:29 crc kubenswrapper[5017]: [+]process-running ok Jan 22 14:05:29 crc kubenswrapper[5017]: healthz check failed Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.394355 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6d76w" event={"ID":"82ca4b25-fb05-4542-814f-9a9cc19662d5","Type":"ContainerStarted","Data":"d3e952b8db739f31984d26736a8372f20fa9d26703dfa2298f007839521a70ea"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.394380 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6d76w" event={"ID":"82ca4b25-fb05-4542-814f-9a9cc19662d5","Type":"ContainerStarted","Data":"1eb93583e478c580f4fcccfd230f34e1fa0d8c3d0d40a892e6d3f7a8e22cdb58"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.394354 5017 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tm429" podUID="10105fdd-b631-4438-a0c7-308a45f9e11e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 14:05:29 crc 
kubenswrapper[5017]: I0122 14:05:29.396221 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-52hrz" event={"ID":"73bf70af-455b-4c32-8dda-324c3aa3d7b7","Type":"ContainerStarted","Data":"af3eceb82d7ed34d23325dfd76a29e09e2610d76990f1524386458f7a592ec99"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.402618 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-66rfh" event={"ID":"58666604-0909-4502-a493-94f59c37556b","Type":"ContainerStarted","Data":"94425312c806b2b9effe241e0b8314a3bea190fbe3b70be8ad353484bf51e803"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.414167 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5nvn" event={"ID":"be128e90-65d6-43a9-9714-a031f24f23c4","Type":"ContainerStarted","Data":"c4c9490d26f55c62d2bb96135a184ca01e70af4f3689711a6fb2013bfc43fbfa"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.424826 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-htpml" podStartSLOduration=124.42481133 podStartE2EDuration="2m4.42481133s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:29.414141155 +0000 UTC m=+144.406603013" watchObservedRunningTime="2026-01-22 14:05:29.42481133 +0000 UTC m=+144.417273188" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.426408 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ctq6h" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.428766 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-ssjmv" event={"ID":"9cf1f133-a18d-4beb-952c-24b3d6530c8f","Type":"ContainerStarted","Data":"44bbc738ba8355b56420c91300ed3cf272f954c265747dd9e613d742e81f420d"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.428802 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-ssjmv" event={"ID":"9cf1f133-a18d-4beb-952c-24b3d6530c8f","Type":"ContainerStarted","Data":"04af4575a8208d3667f1062b10f6212d0eabb3ceb49764a4739876f4fbe50324"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.429396 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-ssjmv" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.437420 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-r84vw" event={"ID":"fe2340df-b827-4162-8910-7d38587dee24","Type":"ContainerStarted","Data":"a9f92da43fdb36ac7240f82d9f968488b53ad0c0175f5e1a9d4f753e561e940b"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.442491 5017 generic.go:334] "Generic (PLEG): container finished" podID="188093c1-77b0-4e37-a53b-f3974ad22cff" containerID="263a3191ddc74a5769fc99a7adfca427b4ad827a2f55b6aeccd7af7a3ca297ff" exitCode=0 Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.442570 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-hsdps" 
event={"ID":"188093c1-77b0-4e37-a53b-f3974ad22cff","Type":"ContainerDied","Data":"263a3191ddc74a5769fc99a7adfca427b4ad827a2f55b6aeccd7af7a3ca297ff"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.456932 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" event={"ID":"ae146171-4a21-45b6-8484-66fbdb597a5e","Type":"ContainerStarted","Data":"b782120a6e87582ef934563b24c110ef3a7ffd237fbdc1277b089e25711c846e"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.457903 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.470819 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-l5x2x" event={"ID":"87792bd4-454a-40fb-96fc-bf89d0aac093","Type":"ContainerStarted","Data":"a55464b4746d22745c6742fb9752c5cb14370f7fb7ef2011416e03e00bc653c4"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.470864 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-l5x2x" event={"ID":"87792bd4-454a-40fb-96fc-bf89d0aac093","Type":"ContainerStarted","Data":"105a9b6ee82ba1ae735cda7e475328645dfa81ce1d672f346f2afd21b2431f36"} Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.475892 5017 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-7h4hj container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/healthz\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.475949 5017 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" podUID="faf7a532-753d-4862-812c-3ef174c75f81" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.18:8080/healthz\": dial tcp 10.217.0.18:8080: connect: connection refused" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.480186 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-nlphk" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.485274 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:29 crc kubenswrapper[5017]: E0122 14:05:29.490865 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:29.990849343 +0000 UTC m=+144.983311261 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.500643 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b5f9x" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.535358 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" podStartSLOduration=124.535340019 podStartE2EDuration="2m4.535340019s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:29.505508764 +0000 UTC m=+144.497970622" watchObservedRunningTime="2026-01-22 14:05:29.535340019 +0000 UTC m=+144.527801877" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.589555 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:29 crc kubenswrapper[5017]: E0122 14:05:29.591576 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:30.09155939 +0000 UTC m=+145.084021248 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.594306 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-r4qhd" podStartSLOduration=124.594286529 podStartE2EDuration="2m4.594286529s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:29.544506032 +0000 UTC m=+144.536967890" watchObservedRunningTime="2026-01-22 14:05:29.594286529 +0000 UTC m=+144.586748397" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.606291 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jkvz7"] Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.606993 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-t47kf" podStartSLOduration=124.606968952 podStartE2EDuration="2m4.606968952s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:29.580709359 +0000 UTC m=+144.573171217" watchObservedRunningTime="2026-01-22 14:05:29.606968952 +0000 UTC m=+144.599430810" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.607291 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jkvz7" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.612708 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.631447 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-hq6wt" podStartSLOduration=124.631430093 podStartE2EDuration="2m4.631430093s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:29.628774187 +0000 UTC m=+144.621236055" watchObservedRunningTime="2026-01-22 14:05:29.631430093 +0000 UTC m=+144.623891951" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.644652 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jkvz7"] Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.693956 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.694047 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51b300c4-69d8-49d7-a5e4-ec3054ceed30-catalog-content\") pod \"certified-operators-jkvz7\" (UID: \"51b300c4-69d8-49d7-a5e4-ec3054ceed30\") " pod="openshift-marketplace/certified-operators-jkvz7" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.694076 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4qws\" (UniqueName: \"kubernetes.io/projected/51b300c4-69d8-49d7-a5e4-ec3054ceed30-kube-api-access-c4qws\") pod \"certified-operators-jkvz7\" (UID: \"51b300c4-69d8-49d7-a5e4-ec3054ceed30\") " pod="openshift-marketplace/certified-operators-jkvz7" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.694133 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51b300c4-69d8-49d7-a5e4-ec3054ceed30-utilities\") pod \"certified-operators-jkvz7\" (UID: \"51b300c4-69d8-49d7-a5e4-ec3054ceed30\") " pod="openshift-marketplace/certified-operators-jkvz7" Jan 22 14:05:29 crc kubenswrapper[5017]: E0122 14:05:29.694476 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:30.19446173 +0000 UTC m=+145.186923598 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.730182 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhnxk" podStartSLOduration=124.730163674 podStartE2EDuration="2m4.730163674s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:29.729511325 +0000 UTC m=+144.721973183" watchObservedRunningTime="2026-01-22 14:05:29.730163674 +0000 UTC m=+144.722625542" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.731576 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" podStartSLOduration=124.731570024 podStartE2EDuration="2m4.731570024s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:29.689859288 +0000 UTC m=+144.682321146" watchObservedRunningTime="2026-01-22 14:05:29.731570024 +0000 UTC m=+144.724031882" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.796554 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.797197 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51b300c4-69d8-49d7-a5e4-ec3054ceed30-catalog-content\") pod \"certified-operators-jkvz7\" (UID: \"51b300c4-69d8-49d7-a5e4-ec3054ceed30\") " pod="openshift-marketplace/certified-operators-jkvz7" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.797255 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4qws\" (UniqueName: \"kubernetes.io/projected/51b300c4-69d8-49d7-a5e4-ec3054ceed30-kube-api-access-c4qws\") pod \"certified-operators-jkvz7\" (UID: \"51b300c4-69d8-49d7-a5e4-ec3054ceed30\") " pod="openshift-marketplace/certified-operators-jkvz7" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.797314 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51b300c4-69d8-49d7-a5e4-ec3054ceed30-utilities\") pod \"certified-operators-jkvz7\" (UID: \"51b300c4-69d8-49d7-a5e4-ec3054ceed30\") " pod="openshift-marketplace/certified-operators-jkvz7" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.797862 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51b300c4-69d8-49d7-a5e4-ec3054ceed30-utilities\") pod \"certified-operators-jkvz7\" (UID: \"51b300c4-69d8-49d7-a5e4-ec3054ceed30\") 
" pod="openshift-marketplace/certified-operators-jkvz7" Jan 22 14:05:29 crc kubenswrapper[5017]: E0122 14:05:29.797989 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:30.297965447 +0000 UTC m=+145.290427305 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.798285 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51b300c4-69d8-49d7-a5e4-ec3054ceed30-catalog-content\") pod \"certified-operators-jkvz7\" (UID: \"51b300c4-69d8-49d7-a5e4-ec3054ceed30\") " pod="openshift-marketplace/certified-operators-jkvz7" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.806978 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-66rfh" podStartSLOduration=124.806955785 podStartE2EDuration="2m4.806955785s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:29.761548863 +0000 UTC m=+144.754010731" watchObservedRunningTime="2026-01-22 14:05:29.806955785 +0000 UTC m=+144.799417643" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.807300 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-v9nd6"] Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.825375 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v9nd6" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.837705 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l5nvn" podStartSLOduration=124.837684876 podStartE2EDuration="2m4.837684876s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:29.818835255 +0000 UTC m=+144.811297113" watchObservedRunningTime="2026-01-22 14:05:29.837684876 +0000 UTC m=+144.830146734" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.846262 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v9nd6"] Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.846638 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.855142 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4qws\" (UniqueName: \"kubernetes.io/projected/51b300c4-69d8-49d7-a5e4-ec3054ceed30-kube-api-access-c4qws\") pod \"certified-operators-jkvz7\" (UID: \"51b300c4-69d8-49d7-a5e4-ec3054ceed30\") " pod="openshift-marketplace/certified-operators-jkvz7" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.901872 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.901940 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a773a1c5-5756-429a-950d-7e8b8c95341f-utilities\") pod \"community-operators-v9nd6\" (UID: \"a773a1c5-5756-429a-950d-7e8b8c95341f\") " pod="openshift-marketplace/community-operators-v9nd6" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.901978 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9nz6\" (UniqueName: \"kubernetes.io/projected/a773a1c5-5756-429a-950d-7e8b8c95341f-kube-api-access-m9nz6\") pod \"community-operators-v9nd6\" (UID: \"a773a1c5-5756-429a-950d-7e8b8c95341f\") " pod="openshift-marketplace/community-operators-v9nd6" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.902011 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a773a1c5-5756-429a-950d-7e8b8c95341f-catalog-content\") pod \"community-operators-v9nd6\" (UID: \"a773a1c5-5756-429a-950d-7e8b8c95341f\") " pod="openshift-marketplace/community-operators-v9nd6" Jan 22 14:05:29 crc kubenswrapper[5017]: E0122 14:05:29.902373 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:30.4023596 +0000 UTC m=+145.394821458 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.963391 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jkvz7" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.970746 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6d76w" podStartSLOduration=124.97072952 podStartE2EDuration="2m4.97072952s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:29.942462539 +0000 UTC m=+144.934924397" watchObservedRunningTime="2026-01-22 14:05:29.97072952 +0000 UTC m=+144.963191378" Jan 22 14:05:29 crc kubenswrapper[5017]: I0122 14:05:29.987299 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xhnxk" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.002665 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.002872 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a773a1c5-5756-429a-950d-7e8b8c95341f-utilities\") pod \"community-operators-v9nd6\" (UID: \"a773a1c5-5756-429a-950d-7e8b8c95341f\") " pod="openshift-marketplace/community-operators-v9nd6" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.002928 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9nz6\" (UniqueName: \"kubernetes.io/projected/a773a1c5-5756-429a-950d-7e8b8c95341f-kube-api-access-m9nz6\") pod \"community-operators-v9nd6\" (UID: \"a773a1c5-5756-429a-950d-7e8b8c95341f\") " pod="openshift-marketplace/community-operators-v9nd6" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.002971 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a773a1c5-5756-429a-950d-7e8b8c95341f-catalog-content\") pod \"community-operators-v9nd6\" (UID: \"a773a1c5-5756-429a-950d-7e8b8c95341f\") " pod="openshift-marketplace/community-operators-v9nd6" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.003489 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a773a1c5-5756-429a-950d-7e8b8c95341f-catalog-content\") pod \"community-operators-v9nd6\" (UID: \"a773a1c5-5756-429a-950d-7e8b8c95341f\") " pod="openshift-marketplace/community-operators-v9nd6" Jan 22 14:05:30 crc kubenswrapper[5017]: E0122 14:05:30.003603 5017 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:30.503585501 +0000 UTC m=+145.496047359 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.003838 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a773a1c5-5756-429a-950d-7e8b8c95341f-utilities\") pod \"community-operators-v9nd6\" (UID: \"a773a1c5-5756-429a-950d-7e8b8c95341f\") " pod="openshift-marketplace/community-operators-v9nd6" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.015398 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-s9nbz"] Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.017246 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s9nbz" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.052187 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9nz6\" (UniqueName: \"kubernetes.io/projected/a773a1c5-5756-429a-950d-7e8b8c95341f-kube-api-access-m9nz6\") pod \"community-operators-v9nd6\" (UID: \"a773a1c5-5756-429a-950d-7e8b8c95341f\") " pod="openshift-marketplace/community-operators-v9nd6" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.052629 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s9nbz"] Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.105799 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b6afb3f-3d0e-4c54-8fc9-c112742976cc-utilities\") pod \"certified-operators-s9nbz\" (UID: \"5b6afb3f-3d0e-4c54-8fc9-c112742976cc\") " pod="openshift-marketplace/certified-operators-s9nbz" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.105854 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.105961 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b6afb3f-3d0e-4c54-8fc9-c112742976cc-catalog-content\") pod \"certified-operators-s9nbz\" (UID: \"5b6afb3f-3d0e-4c54-8fc9-c112742976cc\") " pod="openshift-marketplace/certified-operators-s9nbz" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.105981 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2929\" (UniqueName: 
\"kubernetes.io/projected/5b6afb3f-3d0e-4c54-8fc9-c112742976cc-kube-api-access-k2929\") pod \"certified-operators-s9nbz\" (UID: \"5b6afb3f-3d0e-4c54-8fc9-c112742976cc\") " pod="openshift-marketplace/certified-operators-s9nbz" Jan 22 14:05:30 crc kubenswrapper[5017]: E0122 14:05:30.107636 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:30.607623974 +0000 UTC m=+145.600085832 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.131520 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-ssjmv" podStartSLOduration=9.131469507 podStartE2EDuration="9.131469507s" podCreationTimestamp="2026-01-22 14:05:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:30.072386204 +0000 UTC m=+145.064848062" watchObservedRunningTime="2026-01-22 14:05:30.131469507 +0000 UTC m=+145.123931365" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.172916 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ctq6h" podStartSLOduration=125.172902215 podStartE2EDuration="2m5.172902215s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:30.171650629 +0000 UTC m=+145.164112487" watchObservedRunningTime="2026-01-22 14:05:30.172902215 +0000 UTC m=+145.165364073" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.175408 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v9nd6" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.186211 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rx7qv"] Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.187210 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rx7qv" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.208017 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.208318 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b6afb3f-3d0e-4c54-8fc9-c112742976cc-catalog-content\") pod \"certified-operators-s9nbz\" (UID: \"5b6afb3f-3d0e-4c54-8fc9-c112742976cc\") " pod="openshift-marketplace/certified-operators-s9nbz" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.208336 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2929\" (UniqueName: \"kubernetes.io/projected/5b6afb3f-3d0e-4c54-8fc9-c112742976cc-kube-api-access-k2929\") pod \"certified-operators-s9nbz\" (UID: \"5b6afb3f-3d0e-4c54-8fc9-c112742976cc\") " pod="openshift-marketplace/certified-operators-s9nbz" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.208369 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b6afb3f-3d0e-4c54-8fc9-c112742976cc-utilities\") pod \"certified-operators-s9nbz\" (UID: \"5b6afb3f-3d0e-4c54-8fc9-c112742976cc\") " pod="openshift-marketplace/certified-operators-s9nbz" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.209292 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b6afb3f-3d0e-4c54-8fc9-c112742976cc-utilities\") pod \"certified-operators-s9nbz\" (UID: \"5b6afb3f-3d0e-4c54-8fc9-c112742976cc\") " pod="openshift-marketplace/certified-operators-s9nbz" Jan 22 14:05:30 crc kubenswrapper[5017]: E0122 14:05:30.209386 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:30.7093681 +0000 UTC m=+145.701829958 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.209600 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b6afb3f-3d0e-4c54-8fc9-c112742976cc-catalog-content\") pod \"certified-operators-s9nbz\" (UID: \"5b6afb3f-3d0e-4c54-8fc9-c112742976cc\") " pod="openshift-marketplace/certified-operators-s9nbz" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.211546 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.213700 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-l5x2x" podStartSLOduration=125.213685024 podStartE2EDuration="2m5.213685024s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:30.20759971 +0000 UTC m=+145.200061558" watchObservedRunningTime="2026-01-22 14:05:30.213685024 +0000 UTC m=+145.206146882" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.236188 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rx7qv"] Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.279007 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2929\" (UniqueName: \"kubernetes.io/projected/5b6afb3f-3d0e-4c54-8fc9-c112742976cc-kube-api-access-k2929\") pod \"certified-operators-s9nbz\" (UID: \"5b6afb3f-3d0e-4c54-8fc9-c112742976cc\") " pod="openshift-marketplace/certified-operators-s9nbz" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.312934 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsx6c\" (UniqueName: \"kubernetes.io/projected/8304b75b-b8b5-40c2-a63a-7ce2e5e2db54-kube-api-access-nsx6c\") pod \"community-operators-rx7qv\" (UID: \"8304b75b-b8b5-40c2-a63a-7ce2e5e2db54\") " pod="openshift-marketplace/community-operators-rx7qv" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.312971 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8304b75b-b8b5-40c2-a63a-7ce2e5e2db54-catalog-content\") pod \"community-operators-rx7qv\" (UID: \"8304b75b-b8b5-40c2-a63a-7ce2e5e2db54\") " pod="openshift-marketplace/community-operators-rx7qv" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.312995 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 
14:05:30.313028 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8304b75b-b8b5-40c2-a63a-7ce2e5e2db54-utilities\") pod \"community-operators-rx7qv\" (UID: \"8304b75b-b8b5-40c2-a63a-7ce2e5e2db54\") " pod="openshift-marketplace/community-operators-rx7qv" Jan 22 14:05:30 crc kubenswrapper[5017]: E0122 14:05:30.313363 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:30.813349941 +0000 UTC m=+145.805811799 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.351461 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s9nbz" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.406133 5017 patch_prober.go:28] interesting pod/router-default-5444994796-tm429 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 14:05:30 crc kubenswrapper[5017]: [-]has-synced failed: reason withheld Jan 22 14:05:30 crc kubenswrapper[5017]: [+]process-running ok Jan 22 14:05:30 crc kubenswrapper[5017]: healthz check failed Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.406185 5017 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tm429" podUID="10105fdd-b631-4438-a0c7-308a45f9e11e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.413961 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.414202 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8304b75b-b8b5-40c2-a63a-7ce2e5e2db54-utilities\") pod \"community-operators-rx7qv\" (UID: \"8304b75b-b8b5-40c2-a63a-7ce2e5e2db54\") " pod="openshift-marketplace/community-operators-rx7qv" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.414305 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsx6c\" (UniqueName: \"kubernetes.io/projected/8304b75b-b8b5-40c2-a63a-7ce2e5e2db54-kube-api-access-nsx6c\") pod \"community-operators-rx7qv\" (UID: \"8304b75b-b8b5-40c2-a63a-7ce2e5e2db54\") " pod="openshift-marketplace/community-operators-rx7qv" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.414329 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/8304b75b-b8b5-40c2-a63a-7ce2e5e2db54-catalog-content\") pod \"community-operators-rx7qv\" (UID: \"8304b75b-b8b5-40c2-a63a-7ce2e5e2db54\") " pod="openshift-marketplace/community-operators-rx7qv" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.414715 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8304b75b-b8b5-40c2-a63a-7ce2e5e2db54-catalog-content\") pod \"community-operators-rx7qv\" (UID: \"8304b75b-b8b5-40c2-a63a-7ce2e5e2db54\") " pod="openshift-marketplace/community-operators-rx7qv" Jan 22 14:05:30 crc kubenswrapper[5017]: E0122 14:05:30.414789 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:30.914771578 +0000 UTC m=+145.907233436 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.414997 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8304b75b-b8b5-40c2-a63a-7ce2e5e2db54-utilities\") pod \"community-operators-rx7qv\" (UID: \"8304b75b-b8b5-40c2-a63a-7ce2e5e2db54\") " pod="openshift-marketplace/community-operators-rx7qv" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.478673 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsx6c\" (UniqueName: \"kubernetes.io/projected/8304b75b-b8b5-40c2-a63a-7ce2e5e2db54-kube-api-access-nsx6c\") pod \"community-operators-rx7qv\" (UID: \"8304b75b-b8b5-40c2-a63a-7ce2e5e2db54\") " pod="openshift-marketplace/community-operators-rx7qv" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.489875 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" event={"ID":"6fd07d3c-3d37-4d33-b14d-94a806908619","Type":"ContainerStarted","Data":"1b8d18a512bd022e915c325f9026d4fe701b61c367691d85ab29f9c6c1d8cea0"} Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.496872 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-htpml" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.506001 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.525555 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:30 crc kubenswrapper[5017]: E0122 14:05:30.525955 5017 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:31.025939454 +0000 UTC m=+146.018401312 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.532452 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rx7qv" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.604514 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jkvz7"] Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.627747 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:30 crc kubenswrapper[5017]: E0122 14:05:30.629453 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:31.129438311 +0000 UTC m=+146.121900169 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.729441 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:30 crc kubenswrapper[5017]: E0122 14:05:30.729816 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:31.229801108 +0000 UTC m=+146.222262966 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.770850 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v9nd6"] Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.830257 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:30 crc kubenswrapper[5017]: E0122 14:05:30.831135 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:31.331107402 +0000 UTC m=+146.323569260 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.917561 5017 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Jan 22 14:05:30 crc kubenswrapper[5017]: I0122 14:05:30.933661 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:30 crc kubenswrapper[5017]: E0122 14:05:30.934236 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:31.434219128 +0000 UTC m=+146.426680986 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.034801 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:31 crc kubenswrapper[5017]: E0122 14:05:31.035416 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:31.535395508 +0000 UTC m=+146.527857366 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.137295 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-hsdps" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.138259 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:31 crc kubenswrapper[5017]: E0122 14:05:31.138584 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:31.638574046 +0000 UTC m=+146.631035904 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.141655 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s9nbz"] Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.239881 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.239953 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/188093c1-77b0-4e37-a53b-f3974ad22cff-config-volume\") pod \"188093c1-77b0-4e37-a53b-f3974ad22cff\" (UID: \"188093c1-77b0-4e37-a53b-f3974ad22cff\") " Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.240001 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbjjk\" (UniqueName: \"kubernetes.io/projected/188093c1-77b0-4e37-a53b-f3974ad22cff-kube-api-access-bbjjk\") pod \"188093c1-77b0-4e37-a53b-f3974ad22cff\" (UID: \"188093c1-77b0-4e37-a53b-f3974ad22cff\") " Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.240051 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/188093c1-77b0-4e37-a53b-f3974ad22cff-secret-volume\") pod \"188093c1-77b0-4e37-a53b-f3974ad22cff\" (UID: \"188093c1-77b0-4e37-a53b-f3974ad22cff\") " Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.245190 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/188093c1-77b0-4e37-a53b-f3974ad22cff-config-volume" (OuterVolumeSpecName: "config-volume") pod "188093c1-77b0-4e37-a53b-f3974ad22cff" (UID: "188093c1-77b0-4e37-a53b-f3974ad22cff"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:05:31 crc kubenswrapper[5017]: E0122 14:05:31.245394 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:31.745357457 +0000 UTC m=+146.737819445 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.254101 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/188093c1-77b0-4e37-a53b-f3974ad22cff-kube-api-access-bbjjk" (OuterVolumeSpecName: "kube-api-access-bbjjk") pod "188093c1-77b0-4e37-a53b-f3974ad22cff" (UID: "188093c1-77b0-4e37-a53b-f3974ad22cff"). InnerVolumeSpecName "kube-api-access-bbjjk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.254330 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/188093c1-77b0-4e37-a53b-f3974ad22cff-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "188093c1-77b0-4e37-a53b-f3974ad22cff" (UID: "188093c1-77b0-4e37-a53b-f3974ad22cff"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.343295 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.343376 5017 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/188093c1-77b0-4e37-a53b-f3974ad22cff-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.343391 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbjjk\" (UniqueName: \"kubernetes.io/projected/188093c1-77b0-4e37-a53b-f3974ad22cff-kube-api-access-bbjjk\") on node \"crc\" DevicePath \"\"" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.343418 5017 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/188093c1-77b0-4e37-a53b-f3974ad22cff-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 14:05:31 crc kubenswrapper[5017]: E0122 14:05:31.343724 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:31.843709076 +0000 UTC m=+146.836170934 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.394770 5017 patch_prober.go:28] interesting pod/router-default-5444994796-tm429 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 14:05:31 crc kubenswrapper[5017]: [-]has-synced failed: reason withheld Jan 22 14:05:31 crc kubenswrapper[5017]: [+]process-running ok Jan 22 14:05:31 crc kubenswrapper[5017]: healthz check failed Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.394826 5017 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tm429" podUID="10105fdd-b631-4438-a0c7-308a45f9e11e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.427776 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rx7qv"] Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.444733 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:31 crc kubenswrapper[5017]: E0122 14:05:31.444934 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:05:31.944904267 +0000 UTC m=+146.937366125 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.445019 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:31 crc kubenswrapper[5017]: E0122 14:05:31.445415 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:05:31.945405182 +0000 UTC m=+146.937867110 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tn54n" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.459561 5017 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-22T14:05:30.917590341Z","Handler":null,"Name":""} Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.462507 5017 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.462545 5017 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.495669 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9nbz" event={"ID":"5b6afb3f-3d0e-4c54-8fc9-c112742976cc","Type":"ContainerStarted","Data":"c416cf00c1622d0b12ddd66c77e0b5771f916c2150c3f76ddd697a6d93783cf7"} Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.496670 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v9nd6" event={"ID":"a773a1c5-5756-429a-950d-7e8b8c95341f","Type":"ContainerStarted","Data":"e52e1ddf8b3781e44a5eab79a10553800097c43eaa8dc60e12fe86f4cea3a816"} Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.498490 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-hsdps" event={"ID":"188093c1-77b0-4e37-a53b-f3974ad22cff","Type":"ContainerDied","Data":"ba9ddced3ad778434f24148b69a32fb5a41cf53c64996f6f9a41dd6dbd7eafa1"} Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.498515 5017 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba9ddced3ad778434f24148b69a32fb5a41cf53c64996f6f9a41dd6dbd7eafa1" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.498557 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-hsdps" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.501173 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkvz7" event={"ID":"51b300c4-69d8-49d7-a5e4-ec3054ceed30","Type":"ContainerStarted","Data":"feb3a924e1d1e8ee9e98f9d309a03d841290d7d93fac6dca0f56fa6b3814b55d"} Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.501208 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkvz7" event={"ID":"51b300c4-69d8-49d7-a5e4-ec3054ceed30","Type":"ContainerStarted","Data":"966d05eb7ecb97edbdcdc11a32e3e6500e84256f1ee612ddf7a6e836fefc1f51"} Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.504473 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" event={"ID":"6fd07d3c-3d37-4d33-b14d-94a806908619","Type":"ContainerStarted","Data":"bc6314db054222bda1758a603b161919829a6a9d22ef9e4a54461fd20ebbd83b"} Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.509676 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-r4qhd" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.546614 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.564214 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-h9dv2"] Jan 22 14:05:31 crc kubenswrapper[5017]: E0122 14:05:31.564443 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="188093c1-77b0-4e37-a53b-f3974ad22cff" containerName="collect-profiles" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.564457 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="188093c1-77b0-4e37-a53b-f3974ad22cff" containerName="collect-profiles" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.564573 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="188093c1-77b0-4e37-a53b-f3974ad22cff" containerName="collect-profiles" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.565230 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h9dv2" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.566745 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 22 14:05:31 crc kubenswrapper[5017]: W0122 14:05:31.574608 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8304b75b_b8b5_40c2_a63a_7ce2e5e2db54.slice/crio-cd45fa2ee3cda7cf39769347be5695b879afe6c627db325a3f290bf2ecbe39d2 WatchSource:0}: Error finding container cd45fa2ee3cda7cf39769347be5695b879afe6c627db325a3f290bf2ecbe39d2: Status 404 returned error can't find the container with id cd45fa2ee3cda7cf39769347be5695b879afe6c627db325a3f290bf2ecbe39d2 Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.574995 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h9dv2"] Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.575401 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.648486 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.648695 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c820f9cc-89d7-40d4-9113-8ac2982a3bbf-utilities\") pod \"redhat-marketplace-h9dv2\" (UID: \"c820f9cc-89d7-40d4-9113-8ac2982a3bbf\") " pod="openshift-marketplace/redhat-marketplace-h9dv2" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.648890 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2zz9\" (UniqueName: \"kubernetes.io/projected/c820f9cc-89d7-40d4-9113-8ac2982a3bbf-kube-api-access-m2zz9\") pod \"redhat-marketplace-h9dv2\" (UID: \"c820f9cc-89d7-40d4-9113-8ac2982a3bbf\") " pod="openshift-marketplace/redhat-marketplace-h9dv2" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.649033 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c820f9cc-89d7-40d4-9113-8ac2982a3bbf-catalog-content\") pod \"redhat-marketplace-h9dv2\" (UID: \"c820f9cc-89d7-40d4-9113-8ac2982a3bbf\") " pod="openshift-marketplace/redhat-marketplace-h9dv2" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.653244 5017 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.653297 5017 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.750678 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c820f9cc-89d7-40d4-9113-8ac2982a3bbf-utilities\") pod \"redhat-marketplace-h9dv2\" (UID: \"c820f9cc-89d7-40d4-9113-8ac2982a3bbf\") " pod="openshift-marketplace/redhat-marketplace-h9dv2" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.750790 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2zz9\" (UniqueName: \"kubernetes.io/projected/c820f9cc-89d7-40d4-9113-8ac2982a3bbf-kube-api-access-m2zz9\") pod \"redhat-marketplace-h9dv2\" (UID: \"c820f9cc-89d7-40d4-9113-8ac2982a3bbf\") " pod="openshift-marketplace/redhat-marketplace-h9dv2" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.750842 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c820f9cc-89d7-40d4-9113-8ac2982a3bbf-catalog-content\") pod \"redhat-marketplace-h9dv2\" (UID: \"c820f9cc-89d7-40d4-9113-8ac2982a3bbf\") " pod="openshift-marketplace/redhat-marketplace-h9dv2" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.751261 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c820f9cc-89d7-40d4-9113-8ac2982a3bbf-catalog-content\") pod \"redhat-marketplace-h9dv2\" (UID: \"c820f9cc-89d7-40d4-9113-8ac2982a3bbf\") " pod="openshift-marketplace/redhat-marketplace-h9dv2" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.751277 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c820f9cc-89d7-40d4-9113-8ac2982a3bbf-utilities\") pod \"redhat-marketplace-h9dv2\" (UID: \"c820f9cc-89d7-40d4-9113-8ac2982a3bbf\") " pod="openshift-marketplace/redhat-marketplace-h9dv2" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.767567 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2zz9\" (UniqueName: \"kubernetes.io/projected/c820f9cc-89d7-40d4-9113-8ac2982a3bbf-kube-api-access-m2zz9\") pod \"redhat-marketplace-h9dv2\" (UID: \"c820f9cc-89d7-40d4-9113-8ac2982a3bbf\") " pod="openshift-marketplace/redhat-marketplace-h9dv2" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.819888 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tn54n\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.882786 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h9dv2" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.897407 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.964602 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-c5j96"] Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.965628 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c5j96" Jan 22 14:05:31 crc kubenswrapper[5017]: I0122 14:05:31.978190 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-c5j96"] Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.054017 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqj6x\" (UniqueName: \"kubernetes.io/projected/65614518-ced0-4d7b-86f0-2ae04bbcb58a-kube-api-access-kqj6x\") pod \"redhat-marketplace-c5j96\" (UID: \"65614518-ced0-4d7b-86f0-2ae04bbcb58a\") " pod="openshift-marketplace/redhat-marketplace-c5j96" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.054218 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65614518-ced0-4d7b-86f0-2ae04bbcb58a-utilities\") pod \"redhat-marketplace-c5j96\" (UID: \"65614518-ced0-4d7b-86f0-2ae04bbcb58a\") " pod="openshift-marketplace/redhat-marketplace-c5j96" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.054322 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65614518-ced0-4d7b-86f0-2ae04bbcb58a-catalog-content\") pod \"redhat-marketplace-c5j96\" (UID: \"65614518-ced0-4d7b-86f0-2ae04bbcb58a\") " pod="openshift-marketplace/redhat-marketplace-c5j96" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.156244 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65614518-ced0-4d7b-86f0-2ae04bbcb58a-catalog-content\") pod \"redhat-marketplace-c5j96\" (UID: \"65614518-ced0-4d7b-86f0-2ae04bbcb58a\") " pod="openshift-marketplace/redhat-marketplace-c5j96" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.156332 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqj6x\" (UniqueName: \"kubernetes.io/projected/65614518-ced0-4d7b-86f0-2ae04bbcb58a-kube-api-access-kqj6x\") pod \"redhat-marketplace-c5j96\" (UID: \"65614518-ced0-4d7b-86f0-2ae04bbcb58a\") " pod="openshift-marketplace/redhat-marketplace-c5j96" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.156352 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65614518-ced0-4d7b-86f0-2ae04bbcb58a-utilities\") pod \"redhat-marketplace-c5j96\" (UID: \"65614518-ced0-4d7b-86f0-2ae04bbcb58a\") " pod="openshift-marketplace/redhat-marketplace-c5j96" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.157224 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65614518-ced0-4d7b-86f0-2ae04bbcb58a-utilities\") pod \"redhat-marketplace-c5j96\" (UID: 
\"65614518-ced0-4d7b-86f0-2ae04bbcb58a\") " pod="openshift-marketplace/redhat-marketplace-c5j96" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.157268 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65614518-ced0-4d7b-86f0-2ae04bbcb58a-catalog-content\") pod \"redhat-marketplace-c5j96\" (UID: \"65614518-ced0-4d7b-86f0-2ae04bbcb58a\") " pod="openshift-marketplace/redhat-marketplace-c5j96" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.194181 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqj6x\" (UniqueName: \"kubernetes.io/projected/65614518-ced0-4d7b-86f0-2ae04bbcb58a-kube-api-access-kqj6x\") pod \"redhat-marketplace-c5j96\" (UID: \"65614518-ced0-4d7b-86f0-2ae04bbcb58a\") " pod="openshift-marketplace/redhat-marketplace-c5j96" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.277998 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tn54n"] Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.357843 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c5j96" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.358234 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.363246 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.394834 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h9dv2"] Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.395691 5017 patch_prober.go:28] interesting pod/router-default-5444994796-tm429 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 14:05:32 crc kubenswrapper[5017]: [-]has-synced failed: reason withheld Jan 22 14:05:32 crc kubenswrapper[5017]: [+]process-running ok Jan 22 14:05:32 crc kubenswrapper[5017]: healthz check failed Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.395762 5017 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tm429" podUID="10105fdd-b631-4438-a0c7-308a45f9e11e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.460266 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.460350 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.460404 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.465882 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.467919 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.469053 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.472378 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.527084 5017 generic.go:334] "Generic (PLEG): container finished" podID="a773a1c5-5756-429a-950d-7e8b8c95341f" containerID="feb9ff9e2ec7f30b30f36847e06c489347dad533a0fbb0045688cb66a0e9561c" exitCode=0 Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.527212 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v9nd6" event={"ID":"a773a1c5-5756-429a-950d-7e8b8c95341f","Type":"ContainerDied","Data":"feb9ff9e2ec7f30b30f36847e06c489347dad533a0fbb0045688cb66a0e9561c"} Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.528986 5017 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.535872 5017 generic.go:334] "Generic (PLEG): container finished" podID="8304b75b-b8b5-40c2-a63a-7ce2e5e2db54" containerID="dc48753f0e25991475439f7bd577cb82021cf6964dae56f44258147982ba63b2" exitCode=0 Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.535950 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx7qv" event={"ID":"8304b75b-b8b5-40c2-a63a-7ce2e5e2db54","Type":"ContainerDied","Data":"dc48753f0e25991475439f7bd577cb82021cf6964dae56f44258147982ba63b2"} Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.535981 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx7qv" event={"ID":"8304b75b-b8b5-40c2-a63a-7ce2e5e2db54","Type":"ContainerStarted","Data":"cd45fa2ee3cda7cf39769347be5695b879afe6c627db325a3f290bf2ecbe39d2"} Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.544377 5017 generic.go:334] "Generic (PLEG): container finished" podID="51b300c4-69d8-49d7-a5e4-ec3054ceed30" containerID="feb3a924e1d1e8ee9e98f9d309a03d841290d7d93fac6dca0f56fa6b3814b55d" exitCode=0 Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.544451 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkvz7" event={"ID":"51b300c4-69d8-49d7-a5e4-ec3054ceed30","Type":"ContainerDied","Data":"feb3a924e1d1e8ee9e98f9d309a03d841290d7d93fac6dca0f56fa6b3814b55d"} Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.557049 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" event={"ID":"74271c15-b41a-4067-996a-3d24215d6eb8","Type":"ContainerStarted","Data":"02048b02d3af33cfc22adc8c3449a08a0f8eb17393b1ebf40899c4a7b1856575"} Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.557099 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" event={"ID":"74271c15-b41a-4067-996a-3d24215d6eb8","Type":"ContainerStarted","Data":"669eab79893b8ede47456e6aece8be2fa48cd1aeba9c8c258ea8e1839b142c94"} Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.557141 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.566288 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" event={"ID":"6fd07d3c-3d37-4d33-b14d-94a806908619","Type":"ContainerStarted","Data":"ea0269ae765dea6e93658ebf2845c2918790d662b33649fa0444b007d3155a0d"} Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.578785 
5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.580040 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h9dv2" event={"ID":"c820f9cc-89d7-40d4-9113-8ac2982a3bbf","Type":"ContainerStarted","Data":"f3974050ab0e4e907f9b9a4afe75e261c605113a39e65ce433b459d8db5bc074"} Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.582733 5017 generic.go:334] "Generic (PLEG): container finished" podID="5b6afb3f-3d0e-4c54-8fc9-c112742976cc" containerID="0b2653fdcc3e9b0ded9218ac25fd5680b692243f9ee52348b070d7ae725fb5f0" exitCode=0 Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.594803 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.602693 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9nbz" event={"ID":"5b6afb3f-3d0e-4c54-8fc9-c112742976cc","Type":"ContainerDied","Data":"0b2653fdcc3e9b0ded9218ac25fd5680b692243f9ee52348b070d7ae725fb5f0"} Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.631033 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-4j8zp" podStartSLOduration=11.631016558 podStartE2EDuration="11.631016558s" podCreationTimestamp="2026-01-22 14:05:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:32.629615988 +0000 UTC m=+147.622077856" watchObservedRunningTime="2026-01-22 14:05:32.631016558 +0000 UTC m=+147.623478416" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.670511 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-c5j96"] Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.717730 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" podStartSLOduration=127.717707393 podStartE2EDuration="2m7.717707393s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:32.714506781 +0000 UTC m=+147.706968659" watchObservedRunningTime="2026-01-22 14:05:32.717707393 +0000 UTC m=+147.710169251" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.963380 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xhq9v"] Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.987673 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xhq9v" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.994074 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 22 14:05:32 crc kubenswrapper[5017]: I0122 14:05:32.996027 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xhq9v"] Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.094598 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/194b80b4-3a8d-4530-8581-53cc45997936-catalog-content\") pod \"redhat-operators-xhq9v\" (UID: \"194b80b4-3a8d-4530-8581-53cc45997936\") " pod="openshift-marketplace/redhat-operators-xhq9v" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.094941 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/194b80b4-3a8d-4530-8581-53cc45997936-utilities\") pod \"redhat-operators-xhq9v\" (UID: \"194b80b4-3a8d-4530-8581-53cc45997936\") " pod="openshift-marketplace/redhat-operators-xhq9v" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.094995 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6zt5\" (UniqueName: \"kubernetes.io/projected/194b80b4-3a8d-4530-8581-53cc45997936-kube-api-access-p6zt5\") pod \"redhat-operators-xhq9v\" (UID: \"194b80b4-3a8d-4530-8581-53cc45997936\") " pod="openshift-marketplace/redhat-operators-xhq9v" Jan 22 14:05:33 crc kubenswrapper[5017]: W0122 14:05:33.143416 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-57f6b47f7dce0f5cd0ebe45bffe18faa1c78621665e37c8ca6ceee6cb193abbd WatchSource:0}: Error finding container 57f6b47f7dce0f5cd0ebe45bffe18faa1c78621665e37c8ca6ceee6cb193abbd: Status 404 returned error can't find the container with id 57f6b47f7dce0f5cd0ebe45bffe18faa1c78621665e37c8ca6ceee6cb193abbd Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.196310 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/194b80b4-3a8d-4530-8581-53cc45997936-utilities\") pod \"redhat-operators-xhq9v\" (UID: \"194b80b4-3a8d-4530-8581-53cc45997936\") " pod="openshift-marketplace/redhat-operators-xhq9v" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.196392 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6zt5\" (UniqueName: \"kubernetes.io/projected/194b80b4-3a8d-4530-8581-53cc45997936-kube-api-access-p6zt5\") pod \"redhat-operators-xhq9v\" (UID: \"194b80b4-3a8d-4530-8581-53cc45997936\") " pod="openshift-marketplace/redhat-operators-xhq9v" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.196464 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/194b80b4-3a8d-4530-8581-53cc45997936-catalog-content\") pod \"redhat-operators-xhq9v\" (UID: \"194b80b4-3a8d-4530-8581-53cc45997936\") " pod="openshift-marketplace/redhat-operators-xhq9v" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.196829 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/194b80b4-3a8d-4530-8581-53cc45997936-utilities\") pod \"redhat-operators-xhq9v\" (UID: \"194b80b4-3a8d-4530-8581-53cc45997936\") " pod="openshift-marketplace/redhat-operators-xhq9v" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.196903 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/194b80b4-3a8d-4530-8581-53cc45997936-catalog-content\") pod \"redhat-operators-xhq9v\" (UID: \"194b80b4-3a8d-4530-8581-53cc45997936\") " pod="openshift-marketplace/redhat-operators-xhq9v" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.220783 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6zt5\" (UniqueName: \"kubernetes.io/projected/194b80b4-3a8d-4530-8581-53cc45997936-kube-api-access-p6zt5\") pod \"redhat-operators-xhq9v\" (UID: \"194b80b4-3a8d-4530-8581-53cc45997936\") " pod="openshift-marketplace/redhat-operators-xhq9v" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.267009 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.301521 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.301726 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.307728 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.319816 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xhq9v" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.361319 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9f9c6"] Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.362327 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9f9c6" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.374070 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9f9c6"] Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.396331 5017 patch_prober.go:28] interesting pod/router-default-5444994796-tm429 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 14:05:33 crc kubenswrapper[5017]: [-]has-synced failed: reason withheld Jan 22 14:05:33 crc kubenswrapper[5017]: [+]process-running ok Jan 22 14:05:33 crc kubenswrapper[5017]: healthz check failed Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.396405 5017 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tm429" podUID="10105fdd-b631-4438-a0c7-308a45f9e11e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.401472 5017 patch_prober.go:28] interesting pod/downloads-7954f5f757-sk2hg container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.401534 5017 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-sk2hg" podUID="0e0b63e6-f2e1-4ed1-8501-3d0420964499" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.402081 5017 patch_prober.go:28] interesting pod/downloads-7954f5f757-sk2hg container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.402175 5017 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-sk2hg" podUID="0e0b63e6-f2e1-4ed1-8501-3d0420964499" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.409151 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.409711 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.417354 5017 patch_prober.go:28] interesting pod/console-f9d7485db-j4qnq container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.417407 5017 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-j4qnq" podUID="66a013c9-01a4-4027-99ec-185312c81a41" containerName="console" probeResult="failure" output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" Jan 22 14:05:33 crc 
kubenswrapper[5017]: I0122 14:05:33.488584 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.489839 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.491913 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.494625 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.494866 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.503859 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bffa30e4-e5f8-4888-9731-dfd3e4b74cd1-utilities\") pod \"redhat-operators-9f9c6\" (UID: \"bffa30e4-e5f8-4888-9731-dfd3e4b74cd1\") " pod="openshift-marketplace/redhat-operators-9f9c6" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.503930 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fffbl\" (UniqueName: \"kubernetes.io/projected/bffa30e4-e5f8-4888-9731-dfd3e4b74cd1-kube-api-access-fffbl\") pod \"redhat-operators-9f9c6\" (UID: \"bffa30e4-e5f8-4888-9731-dfd3e4b74cd1\") " pod="openshift-marketplace/redhat-operators-9f9c6" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.503970 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bffa30e4-e5f8-4888-9731-dfd3e4b74cd1-catalog-content\") pod \"redhat-operators-9f9c6\" (UID: \"bffa30e4-e5f8-4888-9731-dfd3e4b74cd1\") " pod="openshift-marketplace/redhat-operators-9f9c6" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.590498 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xhq9v"] Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.599365 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"57f6b47f7dce0f5cd0ebe45bffe18faa1c78621665e37c8ca6ceee6cb193abbd"} Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.606202 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bffa30e4-e5f8-4888-9731-dfd3e4b74cd1-utilities\") pod \"redhat-operators-9f9c6\" (UID: \"bffa30e4-e5f8-4888-9731-dfd3e4b74cd1\") " pod="openshift-marketplace/redhat-operators-9f9c6" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.606264 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fffbl\" (UniqueName: \"kubernetes.io/projected/bffa30e4-e5f8-4888-9731-dfd3e4b74cd1-kube-api-access-fffbl\") pod \"redhat-operators-9f9c6\" (UID: \"bffa30e4-e5f8-4888-9731-dfd3e4b74cd1\") " pod="openshift-marketplace/redhat-operators-9f9c6" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.606301 5017 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bffa30e4-e5f8-4888-9731-dfd3e4b74cd1-catalog-content\") pod \"redhat-operators-9f9c6\" (UID: \"bffa30e4-e5f8-4888-9731-dfd3e4b74cd1\") " pod="openshift-marketplace/redhat-operators-9f9c6" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.606359 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d214ded6-d7f7-489b-bddd-094e55d648a9-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"d214ded6-d7f7-489b-bddd-094e55d648a9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.606395 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d214ded6-d7f7-489b-bddd-094e55d648a9-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"d214ded6-d7f7-489b-bddd-094e55d648a9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.606770 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bffa30e4-e5f8-4888-9731-dfd3e4b74cd1-utilities\") pod \"redhat-operators-9f9c6\" (UID: \"bffa30e4-e5f8-4888-9731-dfd3e4b74cd1\") " pod="openshift-marketplace/redhat-operators-9f9c6" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.607261 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bffa30e4-e5f8-4888-9731-dfd3e4b74cd1-catalog-content\") pod \"redhat-operators-9f9c6\" (UID: \"bffa30e4-e5f8-4888-9731-dfd3e4b74cd1\") " pod="openshift-marketplace/redhat-operators-9f9c6" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.628036 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"c1145577f94746126f75cee5857a83b5c74f614eac6f91a0ca04662426ea9cb1"} Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.634275 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fffbl\" (UniqueName: \"kubernetes.io/projected/bffa30e4-e5f8-4888-9731-dfd3e4b74cd1-kube-api-access-fffbl\") pod \"redhat-operators-9f9c6\" (UID: \"bffa30e4-e5f8-4888-9731-dfd3e4b74cd1\") " pod="openshift-marketplace/redhat-operators-9f9c6" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.638511 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"61f5b7ea80652a8b83050dbb01c5b1ad74056f5c7bdab86ade33708622e1d78d"} Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.645327 5017 generic.go:334] "Generic (PLEG): container finished" podID="c820f9cc-89d7-40d4-9113-8ac2982a3bbf" containerID="9e1b0c64fc16c79d3186b4b20a3e5a5edd2a6a67a2d4f7a28bdd525bd89f0bd4" exitCode=0 Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.645460 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h9dv2" event={"ID":"c820f9cc-89d7-40d4-9113-8ac2982a3bbf","Type":"ContainerDied","Data":"9e1b0c64fc16c79d3186b4b20a3e5a5edd2a6a67a2d4f7a28bdd525bd89f0bd4"} Jan 22 14:05:33 crc 
kubenswrapper[5017]: I0122 14:05:33.647251 5017 generic.go:334] "Generic (PLEG): container finished" podID="65614518-ced0-4d7b-86f0-2ae04bbcb58a" containerID="5aaf14bbc09c0158447fa7fd65598ff6d6460ec8318584da72d8c26103bfa1c3" exitCode=0 Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.648090 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c5j96" event={"ID":"65614518-ced0-4d7b-86f0-2ae04bbcb58a","Type":"ContainerDied","Data":"5aaf14bbc09c0158447fa7fd65598ff6d6460ec8318584da72d8c26103bfa1c3"} Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.648152 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c5j96" event={"ID":"65614518-ced0-4d7b-86f0-2ae04bbcb58a","Type":"ContainerStarted","Data":"07dbbea1e35ea057ec169009dd44b6eaac139440c13f4067052cc411a390b4dc"} Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.653653 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-qtbg9" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.684913 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9f9c6" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.707286 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d214ded6-d7f7-489b-bddd-094e55d648a9-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"d214ded6-d7f7-489b-bddd-094e55d648a9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.707819 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d214ded6-d7f7-489b-bddd-094e55d648a9-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"d214ded6-d7f7-489b-bddd-094e55d648a9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.707907 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d214ded6-d7f7-489b-bddd-094e55d648a9-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"d214ded6-d7f7-489b-bddd-094e55d648a9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.739942 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d214ded6-d7f7-489b-bddd-094e55d648a9-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"d214ded6-d7f7-489b-bddd-094e55d648a9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 14:05:33 crc kubenswrapper[5017]: I0122 14:05:33.824478 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 14:05:34 crc kubenswrapper[5017]: I0122 14:05:34.195444 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 22 14:05:34 crc kubenswrapper[5017]: I0122 14:05:34.209543 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9f9c6"] Jan 22 14:05:34 crc kubenswrapper[5017]: I0122 14:05:34.391950 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-tm429" Jan 22 14:05:34 crc kubenswrapper[5017]: I0122 14:05:34.395940 5017 patch_prober.go:28] interesting pod/router-default-5444994796-tm429 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 14:05:34 crc kubenswrapper[5017]: [-]has-synced failed: reason withheld Jan 22 14:05:34 crc kubenswrapper[5017]: [+]process-running ok Jan 22 14:05:34 crc kubenswrapper[5017]: healthz check failed Jan 22 14:05:34 crc kubenswrapper[5017]: I0122 14:05:34.395989 5017 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tm429" podUID="10105fdd-b631-4438-a0c7-308a45f9e11e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 14:05:34 crc kubenswrapper[5017]: I0122 14:05:34.682534 5017 generic.go:334] "Generic (PLEG): container finished" podID="bffa30e4-e5f8-4888-9731-dfd3e4b74cd1" containerID="457bfc865c18b229c3feab03a5ae9d2190854f6a964b30f158e18a8a4d19a18f" exitCode=0 Jan 22 14:05:34 crc kubenswrapper[5017]: I0122 14:05:34.682603 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9f9c6" event={"ID":"bffa30e4-e5f8-4888-9731-dfd3e4b74cd1","Type":"ContainerDied","Data":"457bfc865c18b229c3feab03a5ae9d2190854f6a964b30f158e18a8a4d19a18f"} Jan 22 14:05:34 crc kubenswrapper[5017]: I0122 14:05:34.682910 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9f9c6" event={"ID":"bffa30e4-e5f8-4888-9731-dfd3e4b74cd1","Type":"ContainerStarted","Data":"9c15ff7482028defb8dada9f9b93d62eb4b40dd14d2120d1af13e70c8516e3a6"} Jan 22 14:05:34 crc kubenswrapper[5017]: I0122 14:05:34.686916 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"2017f6a5fe6ed99467c556890bab834edba5a673178b3c9724531e23212f5595"} Jan 22 14:05:34 crc kubenswrapper[5017]: I0122 14:05:34.687342 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:05:34 crc kubenswrapper[5017]: I0122 14:05:34.694691 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"8e6d270c1d4957584a9a3dc064e47ff55b5e3f44d0522a789ab90e9170f3689e"} Jan 22 14:05:34 crc kubenswrapper[5017]: I0122 14:05:34.697794 5017 generic.go:334] "Generic (PLEG): container finished" podID="194b80b4-3a8d-4530-8581-53cc45997936" containerID="47efa657e4829666b1fb21cd47362af62022fc56101ef604d54a0a4fe3657c90" exitCode=0 Jan 22 14:05:34 crc kubenswrapper[5017]: I0122 14:05:34.697848 5017 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xhq9v" event={"ID":"194b80b4-3a8d-4530-8581-53cc45997936","Type":"ContainerDied","Data":"47efa657e4829666b1fb21cd47362af62022fc56101ef604d54a0a4fe3657c90"} Jan 22 14:05:34 crc kubenswrapper[5017]: I0122 14:05:34.697864 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xhq9v" event={"ID":"194b80b4-3a8d-4530-8581-53cc45997936","Type":"ContainerStarted","Data":"7bb9a2658afb67f765a657e1980fe840d5b4b7a5c9826124e5741e12681377eb"} Jan 22 14:05:34 crc kubenswrapper[5017]: I0122 14:05:34.715937 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"22a9419899880c92188d2b0afc404165352e53514c9381282620781e6cdfb8f2"} Jan 22 14:05:34 crc kubenswrapper[5017]: I0122 14:05:34.717677 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"d214ded6-d7f7-489b-bddd-094e55d648a9","Type":"ContainerStarted","Data":"611aa50c2a8505975106759c38cdaae882e0599cedd0b3710b09e61bd357a8bb"} Jan 22 14:05:35 crc kubenswrapper[5017]: I0122 14:05:35.394619 5017 patch_prober.go:28] interesting pod/router-default-5444994796-tm429 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 14:05:35 crc kubenswrapper[5017]: [-]has-synced failed: reason withheld Jan 22 14:05:35 crc kubenswrapper[5017]: [+]process-running ok Jan 22 14:05:35 crc kubenswrapper[5017]: healthz check failed Jan 22 14:05:35 crc kubenswrapper[5017]: I0122 14:05:35.394740 5017 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tm429" podUID="10105fdd-b631-4438-a0c7-308a45f9e11e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 14:05:35 crc kubenswrapper[5017]: I0122 14:05:35.637609 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 22 14:05:35 crc kubenswrapper[5017]: I0122 14:05:35.639422 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 14:05:35 crc kubenswrapper[5017]: I0122 14:05:35.642944 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 22 14:05:35 crc kubenswrapper[5017]: I0122 14:05:35.643216 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 22 14:05:35 crc kubenswrapper[5017]: I0122 14:05:35.646421 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 22 14:05:35 crc kubenswrapper[5017]: I0122 14:05:35.728108 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"d214ded6-d7f7-489b-bddd-094e55d648a9","Type":"ContainerStarted","Data":"8c10a232439df5ff2808dee8f24abc034ba320e4941688780daa3c884a85caf6"} Jan 22 14:05:35 crc kubenswrapper[5017]: I0122 14:05:35.787437 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/290be0b8-f743-42f8-a9a4-4deea9a116dc-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"290be0b8-f743-42f8-a9a4-4deea9a116dc\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 14:05:35 crc kubenswrapper[5017]: I0122 14:05:35.787562 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/290be0b8-f743-42f8-a9a4-4deea9a116dc-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"290be0b8-f743-42f8-a9a4-4deea9a116dc\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 14:05:35 crc kubenswrapper[5017]: I0122 14:05:35.888566 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/290be0b8-f743-42f8-a9a4-4deea9a116dc-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"290be0b8-f743-42f8-a9a4-4deea9a116dc\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 14:05:35 crc kubenswrapper[5017]: I0122 14:05:35.888639 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/290be0b8-f743-42f8-a9a4-4deea9a116dc-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"290be0b8-f743-42f8-a9a4-4deea9a116dc\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 14:05:35 crc kubenswrapper[5017]: I0122 14:05:35.889047 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/290be0b8-f743-42f8-a9a4-4deea9a116dc-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"290be0b8-f743-42f8-a9a4-4deea9a116dc\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 14:05:35 crc kubenswrapper[5017]: I0122 14:05:35.941482 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/290be0b8-f743-42f8-a9a4-4deea9a116dc-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"290be0b8-f743-42f8-a9a4-4deea9a116dc\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 14:05:36 crc kubenswrapper[5017]: I0122 14:05:36.052330 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 14:05:36 crc kubenswrapper[5017]: I0122 14:05:36.396068 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-tm429" Jan 22 14:05:36 crc kubenswrapper[5017]: I0122 14:05:36.399346 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-tm429" Jan 22 14:05:36 crc kubenswrapper[5017]: I0122 14:05:36.596148 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 22 14:05:36 crc kubenswrapper[5017]: I0122 14:05:36.768861 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"290be0b8-f743-42f8-a9a4-4deea9a116dc","Type":"ContainerStarted","Data":"37f8a11b5c967cc10659dbd1df10f7c95368089b8c91f7cad6884e0a4f775adf"} Jan 22 14:05:36 crc kubenswrapper[5017]: I0122 14:05:36.771882 5017 generic.go:334] "Generic (PLEG): container finished" podID="d214ded6-d7f7-489b-bddd-094e55d648a9" containerID="8c10a232439df5ff2808dee8f24abc034ba320e4941688780daa3c884a85caf6" exitCode=0 Jan 22 14:05:36 crc kubenswrapper[5017]: I0122 14:05:36.772721 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"d214ded6-d7f7-489b-bddd-094e55d648a9","Type":"ContainerDied","Data":"8c10a232439df5ff2808dee8f24abc034ba320e4941688780daa3c884a85caf6"} Jan 22 14:05:37 crc kubenswrapper[5017]: I0122 14:05:37.048198 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 14:05:37 crc kubenswrapper[5017]: I0122 14:05:37.123975 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d214ded6-d7f7-489b-bddd-094e55d648a9-kube-api-access\") pod \"d214ded6-d7f7-489b-bddd-094e55d648a9\" (UID: \"d214ded6-d7f7-489b-bddd-094e55d648a9\") " Jan 22 14:05:37 crc kubenswrapper[5017]: I0122 14:05:37.124147 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d214ded6-d7f7-489b-bddd-094e55d648a9-kubelet-dir\") pod \"d214ded6-d7f7-489b-bddd-094e55d648a9\" (UID: \"d214ded6-d7f7-489b-bddd-094e55d648a9\") " Jan 22 14:05:37 crc kubenswrapper[5017]: I0122 14:05:37.124334 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d214ded6-d7f7-489b-bddd-094e55d648a9-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "d214ded6-d7f7-489b-bddd-094e55d648a9" (UID: "d214ded6-d7f7-489b-bddd-094e55d648a9"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:05:37 crc kubenswrapper[5017]: I0122 14:05:37.124656 5017 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d214ded6-d7f7-489b-bddd-094e55d648a9-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 22 14:05:37 crc kubenswrapper[5017]: I0122 14:05:37.130268 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d214ded6-d7f7-489b-bddd-094e55d648a9-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "d214ded6-d7f7-489b-bddd-094e55d648a9" (UID: "d214ded6-d7f7-489b-bddd-094e55d648a9"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:05:37 crc kubenswrapper[5017]: I0122 14:05:37.226163 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d214ded6-d7f7-489b-bddd-094e55d648a9-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 14:05:37 crc kubenswrapper[5017]: I0122 14:05:37.799235 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"290be0b8-f743-42f8-a9a4-4deea9a116dc","Type":"ContainerStarted","Data":"0532141c5b9993a2d60f49c78e88878d02d28bb70006967b7920028932bb86a6"} Jan 22 14:05:37 crc kubenswrapper[5017]: I0122 14:05:37.810209 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"d214ded6-d7f7-489b-bddd-094e55d648a9","Type":"ContainerDied","Data":"611aa50c2a8505975106759c38cdaae882e0599cedd0b3710b09e61bd357a8bb"} Jan 22 14:05:37 crc kubenswrapper[5017]: I0122 14:05:37.810254 5017 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="611aa50c2a8505975106759c38cdaae882e0599cedd0b3710b09e61bd357a8bb" Jan 22 14:05:37 crc kubenswrapper[5017]: I0122 14:05:37.810285 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 14:05:37 crc kubenswrapper[5017]: I0122 14:05:37.855255 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.855229512 podStartE2EDuration="2.855229512s" podCreationTimestamp="2026-01-22 14:05:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:05:37.855088018 +0000 UTC m=+152.847549876" watchObservedRunningTime="2026-01-22 14:05:37.855229512 +0000 UTC m=+152.847691370" Jan 22 14:05:39 crc kubenswrapper[5017]: I0122 14:05:39.292570 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-ssjmv" Jan 22 14:05:39 crc kubenswrapper[5017]: I0122 14:05:39.899714 5017 generic.go:334] "Generic (PLEG): container finished" podID="290be0b8-f743-42f8-a9a4-4deea9a116dc" containerID="0532141c5b9993a2d60f49c78e88878d02d28bb70006967b7920028932bb86a6" exitCode=0 Jan 22 14:05:39 crc kubenswrapper[5017]: I0122 14:05:39.900037 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"290be0b8-f743-42f8-a9a4-4deea9a116dc","Type":"ContainerDied","Data":"0532141c5b9993a2d60f49c78e88878d02d28bb70006967b7920028932bb86a6"} Jan 22 14:05:43 crc kubenswrapper[5017]: I0122 14:05:43.413141 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-sk2hg" Jan 22 14:05:43 crc kubenswrapper[5017]: I0122 14:05:43.423056 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:43 crc kubenswrapper[5017]: I0122 14:05:43.430008 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:05:46 crc kubenswrapper[5017]: I0122 14:05:46.764625 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gbbvd"] Jan 22 14:05:46 crc kubenswrapper[5017]: I0122 14:05:46.766386 5017 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" podUID="2fd68ef9-7984-42a8-bdc0-ae7f538af9c8" containerName="controller-manager" containerID="cri-o://655ad5d27f6930521fa0a5421202b7b12e08666570f545cec10387040c9476c1" gracePeriod=30 Jan 22 14:05:46 crc kubenswrapper[5017]: I0122 14:05:46.782862 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74"] Jan 22 14:05:46 crc kubenswrapper[5017]: I0122 14:05:46.783145 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" podUID="1a853328-872d-4a4e-98d1-681be7eb3603" containerName="route-controller-manager" containerID="cri-o://d4fe4ac8eb6d48f705da75faab75aba7b850cb548a5005581e2c92192048eaf0" gracePeriod=30 Jan 22 14:05:47 crc kubenswrapper[5017]: I0122 14:05:47.919889 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs\") pod \"network-metrics-daemon-gc29v\" (UID: \"bca74e01-1d78-4eeb-b3db-842fda0babd7\") " pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:05:47 crc kubenswrapper[5017]: I0122 14:05:47.939983 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bca74e01-1d78-4eeb-b3db-842fda0babd7-metrics-certs\") pod \"network-metrics-daemon-gc29v\" (UID: \"bca74e01-1d78-4eeb-b3db-842fda0babd7\") " pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:05:47 crc kubenswrapper[5017]: I0122 14:05:47.957682 5017 generic.go:334] "Generic (PLEG): container finished" podID="1a853328-872d-4a4e-98d1-681be7eb3603" containerID="d4fe4ac8eb6d48f705da75faab75aba7b850cb548a5005581e2c92192048eaf0" exitCode=0 Jan 22 14:05:47 crc kubenswrapper[5017]: I0122 14:05:47.957767 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" event={"ID":"1a853328-872d-4a4e-98d1-681be7eb3603","Type":"ContainerDied","Data":"d4fe4ac8eb6d48f705da75faab75aba7b850cb548a5005581e2c92192048eaf0"} Jan 22 14:05:47 crc kubenswrapper[5017]: I0122 14:05:47.958931 5017 generic.go:334] "Generic (PLEG): container finished" podID="2fd68ef9-7984-42a8-bdc0-ae7f538af9c8" containerID="655ad5d27f6930521fa0a5421202b7b12e08666570f545cec10387040c9476c1" exitCode=0 Jan 22 14:05:47 crc kubenswrapper[5017]: I0122 14:05:47.958981 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" event={"ID":"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8","Type":"ContainerDied","Data":"655ad5d27f6930521fa0a5421202b7b12e08666570f545cec10387040c9476c1"} Jan 22 14:05:48 crc kubenswrapper[5017]: I0122 14:05:48.185954 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-gc29v" Jan 22 14:05:51 crc kubenswrapper[5017]: I0122 14:05:51.905932 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:05:52 crc kubenswrapper[5017]: I0122 14:05:52.884478 5017 patch_prober.go:28] interesting pod/machine-config-daemon-cr62h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:05:52 crc kubenswrapper[5017]: I0122 14:05:52.884543 5017 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:05:52 crc kubenswrapper[5017]: I0122 14:05:52.998672 5017 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-rtv74 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Jan 22 14:05:52 crc kubenswrapper[5017]: I0122 14:05:52.998726 5017 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" podUID="1a853328-872d-4a4e-98d1-681be7eb3603" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" Jan 22 14:05:54 crc kubenswrapper[5017]: I0122 14:05:54.455378 5017 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-gbbvd container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 22 14:05:54 crc kubenswrapper[5017]: I0122 14:05:54.455722 5017 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" podUID="2fd68ef9-7984-42a8-bdc0-ae7f538af9c8" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 22 14:06:03 crc kubenswrapper[5017]: I0122 14:06:03.966318 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-htpml" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.003198 5017 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-rtv74 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.003268 5017 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" 
podUID="1a853328-872d-4a4e-98d1-681be7eb3603" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.21:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.044559 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" event={"ID":"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8","Type":"ContainerDied","Data":"3353cd61ea7df5e804ea9877f540548404ae76d5eeee1c5c7860d646cbb12c8a"} Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.044607 5017 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3353cd61ea7df5e804ea9877f540548404ae76d5eeee1c5c7860d646cbb12c8a" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.047564 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"290be0b8-f743-42f8-a9a4-4deea9a116dc","Type":"ContainerDied","Data":"37f8a11b5c967cc10659dbd1df10f7c95368089b8c91f7cad6884e0a4f775adf"} Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.047594 5017 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="37f8a11b5c967cc10659dbd1df10f7c95368089b8c91f7cad6884e0a4f775adf" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.049430 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" event={"ID":"1a853328-872d-4a4e-98d1-681be7eb3603","Type":"ContainerDied","Data":"892864dc58350d7357b086aeec54ced5478efff0f7e2dc46e5f48ad6c1855280"} Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.049470 5017 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="892864dc58350d7357b086aeec54ced5478efff0f7e2dc46e5f48ad6c1855280" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.056213 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.062494 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.087851 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.232181 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/290be0b8-f743-42f8-a9a4-4deea9a116dc-kubelet-dir\") pod \"290be0b8-f743-42f8-a9a4-4deea9a116dc\" (UID: \"290be0b8-f743-42f8-a9a4-4deea9a116dc\") " Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.232409 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/290be0b8-f743-42f8-a9a4-4deea9a116dc-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "290be0b8-f743-42f8-a9a4-4deea9a116dc" (UID: "290be0b8-f743-42f8-a9a4-4deea9a116dc"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.233285 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a853328-872d-4a4e-98d1-681be7eb3603-config\") pod \"1a853328-872d-4a4e-98d1-681be7eb3603\" (UID: \"1a853328-872d-4a4e-98d1-681be7eb3603\") " Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.233358 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a853328-872d-4a4e-98d1-681be7eb3603-serving-cert\") pod \"1a853328-872d-4a4e-98d1-681be7eb3603\" (UID: \"1a853328-872d-4a4e-98d1-681be7eb3603\") " Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.233388 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-serving-cert\") pod \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\" (UID: \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\") " Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.233456 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkn4n\" (UniqueName: \"kubernetes.io/projected/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-kube-api-access-lkn4n\") pod \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\" (UID: \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\") " Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.233493 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1a853328-872d-4a4e-98d1-681be7eb3603-client-ca\") pod \"1a853328-872d-4a4e-98d1-681be7eb3603\" (UID: \"1a853328-872d-4a4e-98d1-681be7eb3603\") " Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.233535 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j48bd\" (UniqueName: \"kubernetes.io/projected/1a853328-872d-4a4e-98d1-681be7eb3603-kube-api-access-j48bd\") pod \"1a853328-872d-4a4e-98d1-681be7eb3603\" (UID: \"1a853328-872d-4a4e-98d1-681be7eb3603\") " Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.233578 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-client-ca\") pod \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\" (UID: \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\") " Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.233611 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/290be0b8-f743-42f8-a9a4-4deea9a116dc-kube-api-access\") pod \"290be0b8-f743-42f8-a9a4-4deea9a116dc\" (UID: \"290be0b8-f743-42f8-a9a4-4deea9a116dc\") " Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.233686 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-config\") pod \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\" (UID: \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\") " Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.234619 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a853328-872d-4a4e-98d1-681be7eb3603-config" (OuterVolumeSpecName: "config") pod "1a853328-872d-4a4e-98d1-681be7eb3603" (UID: "1a853328-872d-4a4e-98d1-681be7eb3603"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.235532 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a853328-872d-4a4e-98d1-681be7eb3603-client-ca" (OuterVolumeSpecName: "client-ca") pod "1a853328-872d-4a4e-98d1-681be7eb3603" (UID: "1a853328-872d-4a4e-98d1-681be7eb3603"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.233738 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-proxy-ca-bundles\") pod \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\" (UID: \"2fd68ef9-7984-42a8-bdc0-ae7f538af9c8\") " Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.236610 5017 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/290be0b8-f743-42f8-a9a4-4deea9a116dc-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.236645 5017 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a853328-872d-4a4e-98d1-681be7eb3603-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.236658 5017 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1a853328-872d-4a4e-98d1-681be7eb3603-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.237106 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-client-ca" (OuterVolumeSpecName: "client-ca") pod "2fd68ef9-7984-42a8-bdc0-ae7f538af9c8" (UID: "2fd68ef9-7984-42a8-bdc0-ae7f538af9c8"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.237141 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "2fd68ef9-7984-42a8-bdc0-ae7f538af9c8" (UID: "2fd68ef9-7984-42a8-bdc0-ae7f538af9c8"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.238699 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-config" (OuterVolumeSpecName: "config") pod "2fd68ef9-7984-42a8-bdc0-ae7f538af9c8" (UID: "2fd68ef9-7984-42a8-bdc0-ae7f538af9c8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.241631 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/290be0b8-f743-42f8-a9a4-4deea9a116dc-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "290be0b8-f743-42f8-a9a4-4deea9a116dc" (UID: "290be0b8-f743-42f8-a9a4-4deea9a116dc"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.243744 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-kube-api-access-lkn4n" (OuterVolumeSpecName: "kube-api-access-lkn4n") pod "2fd68ef9-7984-42a8-bdc0-ae7f538af9c8" (UID: "2fd68ef9-7984-42a8-bdc0-ae7f538af9c8"). InnerVolumeSpecName "kube-api-access-lkn4n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.243974 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "2fd68ef9-7984-42a8-bdc0-ae7f538af9c8" (UID: "2fd68ef9-7984-42a8-bdc0-ae7f538af9c8"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.249544 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a853328-872d-4a4e-98d1-681be7eb3603-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1a853328-872d-4a4e-98d1-681be7eb3603" (UID: "1a853328-872d-4a4e-98d1-681be7eb3603"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.256633 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a853328-872d-4a4e-98d1-681be7eb3603-kube-api-access-j48bd" (OuterVolumeSpecName: "kube-api-access-j48bd") pod "1a853328-872d-4a4e-98d1-681be7eb3603" (UID: "1a853328-872d-4a4e-98d1-681be7eb3603"). InnerVolumeSpecName "kube-api-access-j48bd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.338349 5017 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.338395 5017 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a853328-872d-4a4e-98d1-681be7eb3603-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.338408 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lkn4n\" (UniqueName: \"kubernetes.io/projected/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-kube-api-access-lkn4n\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.338420 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j48bd\" (UniqueName: \"kubernetes.io/projected/1a853328-872d-4a4e-98d1-681be7eb3603-kube-api-access-j48bd\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.338432 5017 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.338441 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/290be0b8-f743-42f8-a9a4-4deea9a116dc-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.338452 5017 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.338462 5017 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.453328 5017 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-gbbvd container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 22 14:06:04 crc kubenswrapper[5017]: I0122 14:06:04.453438 5017 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" podUID="2fd68ef9-7984-42a8-bdc0-ae7f538af9c8" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 22 14:06:05 crc kubenswrapper[5017]: I0122 14:06:05.054443 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-gbbvd" Jan 22 14:06:05 crc kubenswrapper[5017]: I0122 14:06:05.054489 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 14:06:05 crc kubenswrapper[5017]: I0122 14:06:05.054453 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74" Jan 22 14:06:05 crc kubenswrapper[5017]: I0122 14:06:05.083057 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74"] Jan 22 14:06:05 crc kubenswrapper[5017]: I0122 14:06:05.085898 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rtv74"] Jan 22 14:06:05 crc kubenswrapper[5017]: I0122 14:06:05.097901 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gbbvd"] Jan 22 14:06:05 crc kubenswrapper[5017]: I0122 14:06:05.100475 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gbbvd"] Jan 22 14:06:05 crc kubenswrapper[5017]: I0122 14:06:05.265894 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a853328-872d-4a4e-98d1-681be7eb3603" path="/var/lib/kubelet/pods/1a853328-872d-4a4e-98d1-681be7eb3603/volumes" Jan 22 14:06:05 crc kubenswrapper[5017]: I0122 14:06:05.267365 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fd68ef9-7984-42a8-bdc0-ae7f538af9c8" path="/var/lib/kubelet/pods/2fd68ef9-7984-42a8-bdc0-ae7f538af9c8/volumes" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.242698 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv"] Jan 22 14:06:06 crc kubenswrapper[5017]: E0122 14:06:06.242959 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fd68ef9-7984-42a8-bdc0-ae7f538af9c8" containerName="controller-manager" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.242976 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fd68ef9-7984-42a8-bdc0-ae7f538af9c8" containerName="controller-manager" Jan 22 14:06:06 crc kubenswrapper[5017]: E0122 14:06:06.242996 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="290be0b8-f743-42f8-a9a4-4deea9a116dc" containerName="pruner" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.243007 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="290be0b8-f743-42f8-a9a4-4deea9a116dc" containerName="pruner" Jan 22 14:06:06 crc kubenswrapper[5017]: E0122 14:06:06.243018 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a853328-872d-4a4e-98d1-681be7eb3603" containerName="route-controller-manager" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.243029 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a853328-872d-4a4e-98d1-681be7eb3603" containerName="route-controller-manager" Jan 22 14:06:06 crc kubenswrapper[5017]: E0122 14:06:06.243045 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d214ded6-d7f7-489b-bddd-094e55d648a9" containerName="pruner" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.243052 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="d214ded6-d7f7-489b-bddd-094e55d648a9" containerName="pruner" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.243192 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fd68ef9-7984-42a8-bdc0-ae7f538af9c8" 
containerName="controller-manager" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.243207 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a853328-872d-4a4e-98d1-681be7eb3603" containerName="route-controller-manager" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.243226 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="d214ded6-d7f7-489b-bddd-094e55d648a9" containerName="pruner" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.243237 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="290be0b8-f743-42f8-a9a4-4deea9a116dc" containerName="pruner" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.243632 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.246598 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.246692 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.246983 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.247175 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.247668 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.251285 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.257512 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.261178 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv"] Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.336365 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl"] Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.337207 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.341330 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.341412 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.342025 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.342314 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.342780 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.343904 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.353875 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl"] Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.370936 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a19914bd-125c-4066-9c3e-7b21b50a34c9-client-ca\") pod \"controller-manager-5f74fdcb55-wm9dv\" (UID: \"a19914bd-125c-4066-9c3e-7b21b50a34c9\") " pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.371051 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a19914bd-125c-4066-9c3e-7b21b50a34c9-serving-cert\") pod \"controller-manager-5f74fdcb55-wm9dv\" (UID: \"a19914bd-125c-4066-9c3e-7b21b50a34c9\") " pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.371100 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a19914bd-125c-4066-9c3e-7b21b50a34c9-proxy-ca-bundles\") pod \"controller-manager-5f74fdcb55-wm9dv\" (UID: \"a19914bd-125c-4066-9c3e-7b21b50a34c9\") " pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.371162 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a19914bd-125c-4066-9c3e-7b21b50a34c9-config\") pod \"controller-manager-5f74fdcb55-wm9dv\" (UID: \"a19914bd-125c-4066-9c3e-7b21b50a34c9\") " pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.371314 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ffs9\" (UniqueName: \"kubernetes.io/projected/a19914bd-125c-4066-9c3e-7b21b50a34c9-kube-api-access-4ffs9\") pod \"controller-manager-5f74fdcb55-wm9dv\" (UID: 
\"a19914bd-125c-4066-9c3e-7b21b50a34c9\") " pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.472633 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1a1de47-f1f3-440c-bff6-d6299b56321a-config\") pod \"route-controller-manager-66ff769677-xznpl\" (UID: \"c1a1de47-f1f3-440c-bff6-d6299b56321a\") " pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.472719 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a19914bd-125c-4066-9c3e-7b21b50a34c9-client-ca\") pod \"controller-manager-5f74fdcb55-wm9dv\" (UID: \"a19914bd-125c-4066-9c3e-7b21b50a34c9\") " pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.472748 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c1a1de47-f1f3-440c-bff6-d6299b56321a-serving-cert\") pod \"route-controller-manager-66ff769677-xznpl\" (UID: \"c1a1de47-f1f3-440c-bff6-d6299b56321a\") " pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.472773 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a19914bd-125c-4066-9c3e-7b21b50a34c9-serving-cert\") pod \"controller-manager-5f74fdcb55-wm9dv\" (UID: \"a19914bd-125c-4066-9c3e-7b21b50a34c9\") " pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.472791 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a19914bd-125c-4066-9c3e-7b21b50a34c9-proxy-ca-bundles\") pod \"controller-manager-5f74fdcb55-wm9dv\" (UID: \"a19914bd-125c-4066-9c3e-7b21b50a34c9\") " pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.472809 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a19914bd-125c-4066-9c3e-7b21b50a34c9-config\") pod \"controller-manager-5f74fdcb55-wm9dv\" (UID: \"a19914bd-125c-4066-9c3e-7b21b50a34c9\") " pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.472829 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7sgcd\" (UniqueName: \"kubernetes.io/projected/c1a1de47-f1f3-440c-bff6-d6299b56321a-kube-api-access-7sgcd\") pod \"route-controller-manager-66ff769677-xznpl\" (UID: \"c1a1de47-f1f3-440c-bff6-d6299b56321a\") " pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.472859 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c1a1de47-f1f3-440c-bff6-d6299b56321a-client-ca\") pod \"route-controller-manager-66ff769677-xznpl\" (UID: \"c1a1de47-f1f3-440c-bff6-d6299b56321a\") " 
pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.472897 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ffs9\" (UniqueName: \"kubernetes.io/projected/a19914bd-125c-4066-9c3e-7b21b50a34c9-kube-api-access-4ffs9\") pod \"controller-manager-5f74fdcb55-wm9dv\" (UID: \"a19914bd-125c-4066-9c3e-7b21b50a34c9\") " pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.473758 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a19914bd-125c-4066-9c3e-7b21b50a34c9-client-ca\") pod \"controller-manager-5f74fdcb55-wm9dv\" (UID: \"a19914bd-125c-4066-9c3e-7b21b50a34c9\") " pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.474390 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a19914bd-125c-4066-9c3e-7b21b50a34c9-proxy-ca-bundles\") pod \"controller-manager-5f74fdcb55-wm9dv\" (UID: \"a19914bd-125c-4066-9c3e-7b21b50a34c9\") " pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.474564 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a19914bd-125c-4066-9c3e-7b21b50a34c9-config\") pod \"controller-manager-5f74fdcb55-wm9dv\" (UID: \"a19914bd-125c-4066-9c3e-7b21b50a34c9\") " pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.481378 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a19914bd-125c-4066-9c3e-7b21b50a34c9-serving-cert\") pod \"controller-manager-5f74fdcb55-wm9dv\" (UID: \"a19914bd-125c-4066-9c3e-7b21b50a34c9\") " pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.500258 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ffs9\" (UniqueName: \"kubernetes.io/projected/a19914bd-125c-4066-9c3e-7b21b50a34c9-kube-api-access-4ffs9\") pod \"controller-manager-5f74fdcb55-wm9dv\" (UID: \"a19914bd-125c-4066-9c3e-7b21b50a34c9\") " pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.559151 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.573850 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1a1de47-f1f3-440c-bff6-d6299b56321a-config\") pod \"route-controller-manager-66ff769677-xznpl\" (UID: \"c1a1de47-f1f3-440c-bff6-d6299b56321a\") " pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.573911 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c1a1de47-f1f3-440c-bff6-d6299b56321a-serving-cert\") pod \"route-controller-manager-66ff769677-xznpl\" (UID: \"c1a1de47-f1f3-440c-bff6-d6299b56321a\") " pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.573938 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7sgcd\" (UniqueName: \"kubernetes.io/projected/c1a1de47-f1f3-440c-bff6-d6299b56321a-kube-api-access-7sgcd\") pod \"route-controller-manager-66ff769677-xznpl\" (UID: \"c1a1de47-f1f3-440c-bff6-d6299b56321a\") " pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.573966 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c1a1de47-f1f3-440c-bff6-d6299b56321a-client-ca\") pod \"route-controller-manager-66ff769677-xznpl\" (UID: \"c1a1de47-f1f3-440c-bff6-d6299b56321a\") " pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.574878 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c1a1de47-f1f3-440c-bff6-d6299b56321a-client-ca\") pod \"route-controller-manager-66ff769677-xznpl\" (UID: \"c1a1de47-f1f3-440c-bff6-d6299b56321a\") " pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.575727 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1a1de47-f1f3-440c-bff6-d6299b56321a-config\") pod \"route-controller-manager-66ff769677-xznpl\" (UID: \"c1a1de47-f1f3-440c-bff6-d6299b56321a\") " pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.579622 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c1a1de47-f1f3-440c-bff6-d6299b56321a-serving-cert\") pod \"route-controller-manager-66ff769677-xznpl\" (UID: \"c1a1de47-f1f3-440c-bff6-d6299b56321a\") " pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" Jan 22 14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.613080 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7sgcd\" (UniqueName: \"kubernetes.io/projected/c1a1de47-f1f3-440c-bff6-d6299b56321a-kube-api-access-7sgcd\") pod \"route-controller-manager-66ff769677-xznpl\" (UID: \"c1a1de47-f1f3-440c-bff6-d6299b56321a\") " pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" Jan 22 
14:06:06 crc kubenswrapper[5017]: I0122 14:06:06.650160 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" Jan 22 14:06:08 crc kubenswrapper[5017]: E0122 14:06:08.238821 5017 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 22 14:06:08 crc kubenswrapper[5017]: E0122 14:06:08.239374 5017 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-c4qws,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-jkvz7_openshift-marketplace(51b300c4-69d8-49d7-a5e4-ec3054ceed30): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 14:06:08 crc kubenswrapper[5017]: E0122 14:06:08.240493 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-jkvz7" podUID="51b300c4-69d8-49d7-a5e4-ec3054ceed30" Jan 22 14:06:09 crc kubenswrapper[5017]: E0122 14:06:09.088928 5017 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 22 14:06:09 crc kubenswrapper[5017]: E0122 14:06:09.089158 5017 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-m2zz9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-h9dv2_openshift-marketplace(c820f9cc-89d7-40d4-9113-8ac2982a3bbf): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 14:06:09 crc kubenswrapper[5017]: E0122 14:06:09.090344 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-h9dv2" podUID="c820f9cc-89d7-40d4-9113-8ac2982a3bbf" Jan 22 14:06:11 crc kubenswrapper[5017]: I0122 14:06:11.824135 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 22 14:06:11 crc kubenswrapper[5017]: I0122 14:06:11.825591 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 14:06:11 crc kubenswrapper[5017]: I0122 14:06:11.828485 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 22 14:06:11 crc kubenswrapper[5017]: I0122 14:06:11.832240 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 22 14:06:11 crc kubenswrapper[5017]: I0122 14:06:11.837371 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 22 14:06:11 crc kubenswrapper[5017]: I0122 14:06:11.985963 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/81b3023c-5738-427f-86a7-179af5a2be45-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"81b3023c-5738-427f-86a7-179af5a2be45\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 14:06:11 crc kubenswrapper[5017]: I0122 14:06:11.986290 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/81b3023c-5738-427f-86a7-179af5a2be45-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"81b3023c-5738-427f-86a7-179af5a2be45\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 14:06:12 crc kubenswrapper[5017]: I0122 14:06:12.087692 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/81b3023c-5738-427f-86a7-179af5a2be45-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"81b3023c-5738-427f-86a7-179af5a2be45\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 14:06:12 crc kubenswrapper[5017]: I0122 14:06:12.087762 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/81b3023c-5738-427f-86a7-179af5a2be45-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"81b3023c-5738-427f-86a7-179af5a2be45\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 14:06:12 crc kubenswrapper[5017]: I0122 14:06:12.088149 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/81b3023c-5738-427f-86a7-179af5a2be45-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"81b3023c-5738-427f-86a7-179af5a2be45\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 14:06:12 crc kubenswrapper[5017]: I0122 14:06:12.117232 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/81b3023c-5738-427f-86a7-179af5a2be45-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"81b3023c-5738-427f-86a7-179af5a2be45\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 14:06:12 crc kubenswrapper[5017]: I0122 14:06:12.191146 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 14:06:12 crc kubenswrapper[5017]: E0122 14:06:12.342127 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-jkvz7" podUID="51b300c4-69d8-49d7-a5e4-ec3054ceed30" Jan 22 14:06:12 crc kubenswrapper[5017]: E0122 14:06:12.342313 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-h9dv2" podUID="c820f9cc-89d7-40d4-9113-8ac2982a3bbf" Jan 22 14:06:12 crc kubenswrapper[5017]: E0122 14:06:12.425577 5017 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 22 14:06:12 crc kubenswrapper[5017]: E0122 14:06:12.425918 5017 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-p6zt5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-xhq9v_openshift-marketplace(194b80b4-3a8d-4530-8581-53cc45997936): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 14:06:12 crc kubenswrapper[5017]: E0122 14:06:12.427479 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-xhq9v" podUID="194b80b4-3a8d-4530-8581-53cc45997936" Jan 22 14:06:12 crc kubenswrapper[5017]: E0122 14:06:12.444800 5017 
log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 22 14:06:12 crc kubenswrapper[5017]: E0122 14:06:12.444959 5017 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kqj6x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-c5j96_openshift-marketplace(65614518-ced0-4d7b-86f0-2ae04bbcb58a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 14:06:12 crc kubenswrapper[5017]: E0122 14:06:12.447300 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-c5j96" podUID="65614518-ced0-4d7b-86f0-2ae04bbcb58a" Jan 22 14:06:12 crc kubenswrapper[5017]: E0122 14:06:12.452826 5017 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 22 14:06:12 crc kubenswrapper[5017]: E0122 14:06:12.452969 5017 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fffbl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-9f9c6_openshift-marketplace(bffa30e4-e5f8-4888-9731-dfd3e4b74cd1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 14:06:12 crc kubenswrapper[5017]: E0122 14:06:12.454560 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-9f9c6" podUID="bffa30e4-e5f8-4888-9731-dfd3e4b74cd1" Jan 22 14:06:12 crc kubenswrapper[5017]: I0122 14:06:12.475927 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:06:13 crc kubenswrapper[5017]: E0122 14:06:13.741470 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-9f9c6" podUID="bffa30e4-e5f8-4888-9731-dfd3e4b74cd1" Jan 22 14:06:13 crc kubenswrapper[5017]: E0122 14:06:13.741469 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-xhq9v" podUID="194b80b4-3a8d-4530-8581-53cc45997936" Jan 22 14:06:13 crc kubenswrapper[5017]: E0122 14:06:13.741815 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-c5j96" podUID="65614518-ced0-4d7b-86f0-2ae04bbcb58a" Jan 22 14:06:13 crc kubenswrapper[5017]: E0122 14:06:13.825488 5017 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context 
canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 22 14:06:13 crc kubenswrapper[5017]: E0122 14:06:13.825852 5017 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nsx6c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-rx7qv_openshift-marketplace(8304b75b-b8b5-40c2-a63a-7ce2e5e2db54): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 14:06:13 crc kubenswrapper[5017]: E0122 14:06:13.827048 5017 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 22 14:06:13 crc kubenswrapper[5017]: E0122 14:06:13.827092 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-rx7qv" podUID="8304b75b-b8b5-40c2-a63a-7ce2e5e2db54" Jan 22 14:06:13 crc kubenswrapper[5017]: E0122 14:06:13.827275 5017 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-m9nz6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-v9nd6_openshift-marketplace(a773a1c5-5756-429a-950d-7e8b8c95341f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 14:06:13 crc kubenswrapper[5017]: E0122 14:06:13.828525 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-v9nd6" podUID="a773a1c5-5756-429a-950d-7e8b8c95341f" Jan 22 14:06:13 crc kubenswrapper[5017]: E0122 14:06:13.877969 5017 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 22 14:06:13 crc kubenswrapper[5017]: E0122 14:06:13.878291 5017 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-k2929,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-s9nbz_openshift-marketplace(5b6afb3f-3d0e-4c54-8fc9-c112742976cc): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 14:06:13 crc kubenswrapper[5017]: E0122 14:06:13.879993 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-s9nbz" podUID="5b6afb3f-3d0e-4c54-8fc9-c112742976cc" Jan 22 14:06:14 crc kubenswrapper[5017]: E0122 14:06:14.097900 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-v9nd6" podUID="a773a1c5-5756-429a-950d-7e8b8c95341f" Jan 22 14:06:14 crc kubenswrapper[5017]: E0122 14:06:14.098914 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-rx7qv" podUID="8304b75b-b8b5-40c2-a63a-7ce2e5e2db54" Jan 22 14:06:14 crc kubenswrapper[5017]: E0122 14:06:14.099078 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-s9nbz" podUID="5b6afb3f-3d0e-4c54-8fc9-c112742976cc" Jan 22 14:06:14 crc kubenswrapper[5017]: I0122 14:06:14.191959 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 22 14:06:14 crc kubenswrapper[5017]: I0122 14:06:14.250084 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-gc29v"] Jan 22 14:06:14 crc kubenswrapper[5017]: W0122 
14:06:14.258922 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda19914bd_125c_4066_9c3e_7b21b50a34c9.slice/crio-6d3559834d933c264b2e68a3ac89a48f0ff673e99f8fa820fb9e3411b488c4b0 WatchSource:0}: Error finding container 6d3559834d933c264b2e68a3ac89a48f0ff673e99f8fa820fb9e3411b488c4b0: Status 404 returned error can't find the container with id 6d3559834d933c264b2e68a3ac89a48f0ff673e99f8fa820fb9e3411b488c4b0 Jan 22 14:06:14 crc kubenswrapper[5017]: I0122 14:06:14.259235 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl"] Jan 22 14:06:14 crc kubenswrapper[5017]: W0122 14:06:14.259806 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbca74e01_1d78_4eeb_b3db_842fda0babd7.slice/crio-9299c67122065d8264c8a5f9b89acf3d7865002e1ec80f0ccfbb03513cd6aa9a WatchSource:0}: Error finding container 9299c67122065d8264c8a5f9b89acf3d7865002e1ec80f0ccfbb03513cd6aa9a: Status 404 returned error can't find the container with id 9299c67122065d8264c8a5f9b89acf3d7865002e1ec80f0ccfbb03513cd6aa9a Jan 22 14:06:14 crc kubenswrapper[5017]: I0122 14:06:14.263881 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv"] Jan 22 14:06:14 crc kubenswrapper[5017]: W0122 14:06:14.264172 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc1a1de47_f1f3_440c_bff6_d6299b56321a.slice/crio-a10316599d370052daabc4d367f508ed5cca0b907250166fca2e6855e25fe262 WatchSource:0}: Error finding container a10316599d370052daabc4d367f508ed5cca0b907250166fca2e6855e25fe262: Status 404 returned error can't find the container with id a10316599d370052daabc4d367f508ed5cca0b907250166fca2e6855e25fe262 Jan 22 14:06:15 crc kubenswrapper[5017]: I0122 14:06:15.101912 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" event={"ID":"c1a1de47-f1f3-440c-bff6-d6299b56321a","Type":"ContainerStarted","Data":"c56e4034bece0df26a19cebe1d687446d48b5fc73bae8bb3c20342e5a21b49aa"} Jan 22 14:06:15 crc kubenswrapper[5017]: I0122 14:06:15.104136 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" Jan 22 14:06:15 crc kubenswrapper[5017]: I0122 14:06:15.104259 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" event={"ID":"c1a1de47-f1f3-440c-bff6-d6299b56321a","Type":"ContainerStarted","Data":"a10316599d370052daabc4d367f508ed5cca0b907250166fca2e6855e25fe262"} Jan 22 14:06:15 crc kubenswrapper[5017]: I0122 14:06:15.104821 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" event={"ID":"a19914bd-125c-4066-9c3e-7b21b50a34c9","Type":"ContainerStarted","Data":"99b8882b01bc3bed15eeec8f1224e3d8e1c06abec6b619dfcb4ca0ccd0031b99"} Jan 22 14:06:15 crc kubenswrapper[5017]: I0122 14:06:15.104947 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" 
event={"ID":"a19914bd-125c-4066-9c3e-7b21b50a34c9","Type":"ContainerStarted","Data":"6d3559834d933c264b2e68a3ac89a48f0ff673e99f8fa820fb9e3411b488c4b0"} Jan 22 14:06:15 crc kubenswrapper[5017]: I0122 14:06:15.105035 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" Jan 22 14:06:15 crc kubenswrapper[5017]: I0122 14:06:15.106947 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-gc29v" event={"ID":"bca74e01-1d78-4eeb-b3db-842fda0babd7","Type":"ContainerStarted","Data":"c9501a927532f3b0b5e23be44286fbc43cc9591f562ce49e38b7d086f56b2ad6"} Jan 22 14:06:15 crc kubenswrapper[5017]: I0122 14:06:15.106997 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-gc29v" event={"ID":"bca74e01-1d78-4eeb-b3db-842fda0babd7","Type":"ContainerStarted","Data":"cfaf995c3c6333519346b86c83256fb1b8c3752a426635e0f3801a4d022e2cb6"} Jan 22 14:06:15 crc kubenswrapper[5017]: I0122 14:06:15.107009 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-gc29v" event={"ID":"bca74e01-1d78-4eeb-b3db-842fda0babd7","Type":"ContainerStarted","Data":"9299c67122065d8264c8a5f9b89acf3d7865002e1ec80f0ccfbb03513cd6aa9a"} Jan 22 14:06:15 crc kubenswrapper[5017]: I0122 14:06:15.108481 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"81b3023c-5738-427f-86a7-179af5a2be45","Type":"ContainerStarted","Data":"a715a1c8cf576d4dd6505a7b7d9c03073128c5611f14924d949070dde4ff7a20"} Jan 22 14:06:15 crc kubenswrapper[5017]: I0122 14:06:15.108540 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"81b3023c-5738-427f-86a7-179af5a2be45","Type":"ContainerStarted","Data":"c131c6a00b4f3fdf542558658f7f86449b0790357563f9e43a95e26d23e0f4b9"} Jan 22 14:06:15 crc kubenswrapper[5017]: I0122 14:06:15.110928 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" Jan 22 14:06:15 crc kubenswrapper[5017]: I0122 14:06:15.112145 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" Jan 22 14:06:15 crc kubenswrapper[5017]: I0122 14:06:15.122710 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" podStartSLOduration=9.122690775 podStartE2EDuration="9.122690775s" podCreationTimestamp="2026-01-22 14:06:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:06:15.12044136 +0000 UTC m=+190.112903228" watchObservedRunningTime="2026-01-22 14:06:15.122690775 +0000 UTC m=+190.115152633" Jan 22 14:06:15 crc kubenswrapper[5017]: I0122 14:06:15.143302 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-gc29v" podStartSLOduration=170.143280085 podStartE2EDuration="2m50.143280085s" podCreationTimestamp="2026-01-22 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:06:15.138953021 +0000 UTC m=+190.131414889" watchObservedRunningTime="2026-01-22 14:06:15.143280085 
+0000 UTC m=+190.135741943" Jan 22 14:06:15 crc kubenswrapper[5017]: I0122 14:06:15.163500 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=4.163481854 podStartE2EDuration="4.163481854s" podCreationTimestamp="2026-01-22 14:06:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:06:15.161198039 +0000 UTC m=+190.153659897" watchObservedRunningTime="2026-01-22 14:06:15.163481854 +0000 UTC m=+190.155943712" Jan 22 14:06:16 crc kubenswrapper[5017]: I0122 14:06:16.114952 5017 generic.go:334] "Generic (PLEG): container finished" podID="81b3023c-5738-427f-86a7-179af5a2be45" containerID="a715a1c8cf576d4dd6505a7b7d9c03073128c5611f14924d949070dde4ff7a20" exitCode=0 Jan 22 14:06:16 crc kubenswrapper[5017]: I0122 14:06:16.115030 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"81b3023c-5738-427f-86a7-179af5a2be45","Type":"ContainerDied","Data":"a715a1c8cf576d4dd6505a7b7d9c03073128c5611f14924d949070dde4ff7a20"} Jan 22 14:06:16 crc kubenswrapper[5017]: I0122 14:06:16.132931 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" podStartSLOduration=10.132913274 podStartE2EDuration="10.132913274s" podCreationTimestamp="2026-01-22 14:06:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:06:15.217021989 +0000 UTC m=+190.209483847" watchObservedRunningTime="2026-01-22 14:06:16.132913274 +0000 UTC m=+191.125375132" Jan 22 14:06:17 crc kubenswrapper[5017]: I0122 14:06:17.350084 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 14:06:17 crc kubenswrapper[5017]: I0122 14:06:17.457620 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/81b3023c-5738-427f-86a7-179af5a2be45-kube-api-access\") pod \"81b3023c-5738-427f-86a7-179af5a2be45\" (UID: \"81b3023c-5738-427f-86a7-179af5a2be45\") " Jan 22 14:06:17 crc kubenswrapper[5017]: I0122 14:06:17.458841 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/81b3023c-5738-427f-86a7-179af5a2be45-kubelet-dir\") pod \"81b3023c-5738-427f-86a7-179af5a2be45\" (UID: \"81b3023c-5738-427f-86a7-179af5a2be45\") " Jan 22 14:06:17 crc kubenswrapper[5017]: I0122 14:06:17.459148 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/81b3023c-5738-427f-86a7-179af5a2be45-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "81b3023c-5738-427f-86a7-179af5a2be45" (UID: "81b3023c-5738-427f-86a7-179af5a2be45"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:06:17 crc kubenswrapper[5017]: I0122 14:06:17.465592 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81b3023c-5738-427f-86a7-179af5a2be45-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "81b3023c-5738-427f-86a7-179af5a2be45" (UID: "81b3023c-5738-427f-86a7-179af5a2be45"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:06:17 crc kubenswrapper[5017]: I0122 14:06:17.561200 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/81b3023c-5738-427f-86a7-179af5a2be45-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:17 crc kubenswrapper[5017]: I0122 14:06:17.561247 5017 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/81b3023c-5738-427f-86a7-179af5a2be45-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:18 crc kubenswrapper[5017]: I0122 14:06:18.127369 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"81b3023c-5738-427f-86a7-179af5a2be45","Type":"ContainerDied","Data":"c131c6a00b4f3fdf542558658f7f86449b0790357563f9e43a95e26d23e0f4b9"} Jan 22 14:06:18 crc kubenswrapper[5017]: I0122 14:06:18.127692 5017 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c131c6a00b4f3fdf542558658f7f86449b0790357563f9e43a95e26d23e0f4b9" Jan 22 14:06:18 crc kubenswrapper[5017]: I0122 14:06:18.127486 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 14:06:18 crc kubenswrapper[5017]: I0122 14:06:18.825610 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 22 14:06:18 crc kubenswrapper[5017]: E0122 14:06:18.825905 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81b3023c-5738-427f-86a7-179af5a2be45" containerName="pruner" Jan 22 14:06:18 crc kubenswrapper[5017]: I0122 14:06:18.825942 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="81b3023c-5738-427f-86a7-179af5a2be45" containerName="pruner" Jan 22 14:06:18 crc kubenswrapper[5017]: I0122 14:06:18.826066 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="81b3023c-5738-427f-86a7-179af5a2be45" containerName="pruner" Jan 22 14:06:18 crc kubenswrapper[5017]: I0122 14:06:18.826529 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:06:18 crc kubenswrapper[5017]: I0122 14:06:18.828997 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 22 14:06:18 crc kubenswrapper[5017]: I0122 14:06:18.829696 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 22 14:06:18 crc kubenswrapper[5017]: I0122 14:06:18.832899 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 22 14:06:18 crc kubenswrapper[5017]: I0122 14:06:18.977601 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/775ea46b-9a5d-41f7-8940-0cd040b95f58-var-lock\") pod \"installer-9-crc\" (UID: \"775ea46b-9a5d-41f7-8940-0cd040b95f58\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:06:18 crc kubenswrapper[5017]: I0122 14:06:18.977656 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/775ea46b-9a5d-41f7-8940-0cd040b95f58-kube-api-access\") pod \"installer-9-crc\" (UID: \"775ea46b-9a5d-41f7-8940-0cd040b95f58\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:06:18 crc kubenswrapper[5017]: I0122 14:06:18.977731 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/775ea46b-9a5d-41f7-8940-0cd040b95f58-kubelet-dir\") pod \"installer-9-crc\" (UID: \"775ea46b-9a5d-41f7-8940-0cd040b95f58\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:06:19 crc kubenswrapper[5017]: I0122 14:06:19.079614 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/775ea46b-9a5d-41f7-8940-0cd040b95f58-kubelet-dir\") pod \"installer-9-crc\" (UID: \"775ea46b-9a5d-41f7-8940-0cd040b95f58\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:06:19 crc kubenswrapper[5017]: I0122 14:06:19.079690 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/775ea46b-9a5d-41f7-8940-0cd040b95f58-var-lock\") pod \"installer-9-crc\" (UID: \"775ea46b-9a5d-41f7-8940-0cd040b95f58\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:06:19 crc kubenswrapper[5017]: I0122 14:06:19.079714 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/775ea46b-9a5d-41f7-8940-0cd040b95f58-kube-api-access\") pod \"installer-9-crc\" (UID: \"775ea46b-9a5d-41f7-8940-0cd040b95f58\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:06:19 crc kubenswrapper[5017]: I0122 14:06:19.080130 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/775ea46b-9a5d-41f7-8940-0cd040b95f58-var-lock\") pod \"installer-9-crc\" (UID: \"775ea46b-9a5d-41f7-8940-0cd040b95f58\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:06:19 crc kubenswrapper[5017]: I0122 14:06:19.080296 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/775ea46b-9a5d-41f7-8940-0cd040b95f58-kubelet-dir\") pod \"installer-9-crc\" (UID: 
\"775ea46b-9a5d-41f7-8940-0cd040b95f58\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:06:19 crc kubenswrapper[5017]: I0122 14:06:19.097414 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/775ea46b-9a5d-41f7-8940-0cd040b95f58-kube-api-access\") pod \"installer-9-crc\" (UID: \"775ea46b-9a5d-41f7-8940-0cd040b95f58\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:06:19 crc kubenswrapper[5017]: I0122 14:06:19.160792 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:06:19 crc kubenswrapper[5017]: I0122 14:06:19.565425 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 22 14:06:20 crc kubenswrapper[5017]: I0122 14:06:20.141892 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"775ea46b-9a5d-41f7-8940-0cd040b95f58","Type":"ContainerStarted","Data":"0e0bddc1559ae6617a191f34eb8e3ea1fc34db1833235682b0c1babee4dd60db"} Jan 22 14:06:21 crc kubenswrapper[5017]: I0122 14:06:21.148188 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"775ea46b-9a5d-41f7-8940-0cd040b95f58","Type":"ContainerStarted","Data":"56c6c60d7fc852eb220843497ca03fad382f7be4dc897f680b2d591a2d4ca37e"} Jan 22 14:06:21 crc kubenswrapper[5017]: I0122 14:06:21.167862 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=3.167842284 podStartE2EDuration="3.167842284s" podCreationTimestamp="2026-01-22 14:06:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:06:21.162034757 +0000 UTC m=+196.154496615" watchObservedRunningTime="2026-01-22 14:06:21.167842284 +0000 UTC m=+196.160304142" Jan 22 14:06:22 crc kubenswrapper[5017]: I0122 14:06:22.883549 5017 patch_prober.go:28] interesting pod/machine-config-daemon-cr62h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:06:22 crc kubenswrapper[5017]: I0122 14:06:22.883943 5017 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:06:26 crc kubenswrapper[5017]: I0122 14:06:26.188815 5017 generic.go:334] "Generic (PLEG): container finished" podID="c820f9cc-89d7-40d4-9113-8ac2982a3bbf" containerID="e1423d241a134f90e100a3ae77a80ea1bbd4a095825be78c44174016f504b482" exitCode=0 Jan 22 14:06:26 crc kubenswrapper[5017]: I0122 14:06:26.188893 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h9dv2" event={"ID":"c820f9cc-89d7-40d4-9113-8ac2982a3bbf","Type":"ContainerDied","Data":"e1423d241a134f90e100a3ae77a80ea1bbd4a095825be78c44174016f504b482"} Jan 22 14:06:26 crc kubenswrapper[5017]: I0122 14:06:26.196938 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c5j96" 
event={"ID":"65614518-ced0-4d7b-86f0-2ae04bbcb58a","Type":"ContainerStarted","Data":"df596cc59b9c3a3e1d70448433204983bc9a4121bf6879f6e2b5e1fd74865477"} Jan 22 14:06:26 crc kubenswrapper[5017]: I0122 14:06:26.200782 5017 generic.go:334] "Generic (PLEG): container finished" podID="5b6afb3f-3d0e-4c54-8fc9-c112742976cc" containerID="acf1b163b9ec984ccd8511601421e294ba63dcf0bad3311b128e7188c59969a3" exitCode=0 Jan 22 14:06:26 crc kubenswrapper[5017]: I0122 14:06:26.200896 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9nbz" event={"ID":"5b6afb3f-3d0e-4c54-8fc9-c112742976cc","Type":"ContainerDied","Data":"acf1b163b9ec984ccd8511601421e294ba63dcf0bad3311b128e7188c59969a3"} Jan 22 14:06:26 crc kubenswrapper[5017]: I0122 14:06:26.208547 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkvz7" event={"ID":"51b300c4-69d8-49d7-a5e4-ec3054ceed30","Type":"ContainerStarted","Data":"9bb14a85d9c57c3c68305746f33eb6063460f04dd8567e8d37354718e6c87b29"} Jan 22 14:06:26 crc kubenswrapper[5017]: I0122 14:06:26.250266 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv"] Jan 22 14:06:26 crc kubenswrapper[5017]: I0122 14:06:26.250495 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" podUID="a19914bd-125c-4066-9c3e-7b21b50a34c9" containerName="controller-manager" containerID="cri-o://99b8882b01bc3bed15eeec8f1224e3d8e1c06abec6b619dfcb4ca0ccd0031b99" gracePeriod=30 Jan 22 14:06:26 crc kubenswrapper[5017]: I0122 14:06:26.268605 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl"] Jan 22 14:06:26 crc kubenswrapper[5017]: I0122 14:06:26.268881 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" podUID="c1a1de47-f1f3-440c-bff6-d6299b56321a" containerName="route-controller-manager" containerID="cri-o://c56e4034bece0df26a19cebe1d687446d48b5fc73bae8bb3c20342e5a21b49aa" gracePeriod=30 Jan 22 14:06:26 crc kubenswrapper[5017]: I0122 14:06:26.651666 5017 patch_prober.go:28] interesting pod/route-controller-manager-66ff769677-xznpl container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.55:8443/healthz\": dial tcp 10.217.0.55:8443: connect: connection refused" start-of-body= Jan 22 14:06:26 crc kubenswrapper[5017]: I0122 14:06:26.652017 5017 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" podUID="c1a1de47-f1f3-440c-bff6-d6299b56321a" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.55:8443/healthz\": dial tcp 10.217.0.55:8443: connect: connection refused" Jan 22 14:06:26 crc kubenswrapper[5017]: I0122 14:06:26.865556 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" Jan 22 14:06:26 crc kubenswrapper[5017]: I0122 14:06:26.988540 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a19914bd-125c-4066-9c3e-7b21b50a34c9-client-ca\") pod \"a19914bd-125c-4066-9c3e-7b21b50a34c9\" (UID: \"a19914bd-125c-4066-9c3e-7b21b50a34c9\") " Jan 22 14:06:26 crc kubenswrapper[5017]: I0122 14:06:26.988584 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a19914bd-125c-4066-9c3e-7b21b50a34c9-config\") pod \"a19914bd-125c-4066-9c3e-7b21b50a34c9\" (UID: \"a19914bd-125c-4066-9c3e-7b21b50a34c9\") " Jan 22 14:06:26 crc kubenswrapper[5017]: I0122 14:06:26.988628 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a19914bd-125c-4066-9c3e-7b21b50a34c9-proxy-ca-bundles\") pod \"a19914bd-125c-4066-9c3e-7b21b50a34c9\" (UID: \"a19914bd-125c-4066-9c3e-7b21b50a34c9\") " Jan 22 14:06:26 crc kubenswrapper[5017]: I0122 14:06:26.988654 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4ffs9\" (UniqueName: \"kubernetes.io/projected/a19914bd-125c-4066-9c3e-7b21b50a34c9-kube-api-access-4ffs9\") pod \"a19914bd-125c-4066-9c3e-7b21b50a34c9\" (UID: \"a19914bd-125c-4066-9c3e-7b21b50a34c9\") " Jan 22 14:06:26 crc kubenswrapper[5017]: I0122 14:06:26.988719 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a19914bd-125c-4066-9c3e-7b21b50a34c9-serving-cert\") pod \"a19914bd-125c-4066-9c3e-7b21b50a34c9\" (UID: \"a19914bd-125c-4066-9c3e-7b21b50a34c9\") " Jan 22 14:06:26 crc kubenswrapper[5017]: I0122 14:06:26.989825 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a19914bd-125c-4066-9c3e-7b21b50a34c9-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "a19914bd-125c-4066-9c3e-7b21b50a34c9" (UID: "a19914bd-125c-4066-9c3e-7b21b50a34c9"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:06:26 crc kubenswrapper[5017]: I0122 14:06:26.990491 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a19914bd-125c-4066-9c3e-7b21b50a34c9-client-ca" (OuterVolumeSpecName: "client-ca") pod "a19914bd-125c-4066-9c3e-7b21b50a34c9" (UID: "a19914bd-125c-4066-9c3e-7b21b50a34c9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:06:26 crc kubenswrapper[5017]: I0122 14:06:26.991810 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a19914bd-125c-4066-9c3e-7b21b50a34c9-config" (OuterVolumeSpecName: "config") pod "a19914bd-125c-4066-9c3e-7b21b50a34c9" (UID: "a19914bd-125c-4066-9c3e-7b21b50a34c9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:06:26 crc kubenswrapper[5017]: I0122 14:06:26.996026 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a19914bd-125c-4066-9c3e-7b21b50a34c9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "a19914bd-125c-4066-9c3e-7b21b50a34c9" (UID: "a19914bd-125c-4066-9c3e-7b21b50a34c9"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.001386 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a19914bd-125c-4066-9c3e-7b21b50a34c9-kube-api-access-4ffs9" (OuterVolumeSpecName: "kube-api-access-4ffs9") pod "a19914bd-125c-4066-9c3e-7b21b50a34c9" (UID: "a19914bd-125c-4066-9c3e-7b21b50a34c9"). InnerVolumeSpecName "kube-api-access-4ffs9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.091961 5017 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a19914bd-125c-4066-9c3e-7b21b50a34c9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.092458 5017 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a19914bd-125c-4066-9c3e-7b21b50a34c9-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.092472 5017 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a19914bd-125c-4066-9c3e-7b21b50a34c9-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.092523 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4ffs9\" (UniqueName: \"kubernetes.io/projected/a19914bd-125c-4066-9c3e-7b21b50a34c9-kube-api-access-4ffs9\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.092536 5017 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a19914bd-125c-4066-9c3e-7b21b50a34c9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.123163 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.193247 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c1a1de47-f1f3-440c-bff6-d6299b56321a-client-ca\") pod \"c1a1de47-f1f3-440c-bff6-d6299b56321a\" (UID: \"c1a1de47-f1f3-440c-bff6-d6299b56321a\") " Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.194383 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1a1de47-f1f3-440c-bff6-d6299b56321a-config\") pod \"c1a1de47-f1f3-440c-bff6-d6299b56321a\" (UID: \"c1a1de47-f1f3-440c-bff6-d6299b56321a\") " Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.194473 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c1a1de47-f1f3-440c-bff6-d6299b56321a-serving-cert\") pod \"c1a1de47-f1f3-440c-bff6-d6299b56321a\" (UID: \"c1a1de47-f1f3-440c-bff6-d6299b56321a\") " Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.194589 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7sgcd\" (UniqueName: \"kubernetes.io/projected/c1a1de47-f1f3-440c-bff6-d6299b56321a-kube-api-access-7sgcd\") pod \"c1a1de47-f1f3-440c-bff6-d6299b56321a\" (UID: \"c1a1de47-f1f3-440c-bff6-d6299b56321a\") " Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.194138 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1a1de47-f1f3-440c-bff6-d6299b56321a-client-ca" (OuterVolumeSpecName: "client-ca") pod "c1a1de47-f1f3-440c-bff6-d6299b56321a" (UID: "c1a1de47-f1f3-440c-bff6-d6299b56321a"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.194785 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1a1de47-f1f3-440c-bff6-d6299b56321a-config" (OuterVolumeSpecName: "config") pod "c1a1de47-f1f3-440c-bff6-d6299b56321a" (UID: "c1a1de47-f1f3-440c-bff6-d6299b56321a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.194960 5017 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c1a1de47-f1f3-440c-bff6-d6299b56321a-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.195027 5017 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1a1de47-f1f3-440c-bff6-d6299b56321a-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.199232 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1a1de47-f1f3-440c-bff6-d6299b56321a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c1a1de47-f1f3-440c-bff6-d6299b56321a" (UID: "c1a1de47-f1f3-440c-bff6-d6299b56321a"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.199503 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1a1de47-f1f3-440c-bff6-d6299b56321a-kube-api-access-7sgcd" (OuterVolumeSpecName: "kube-api-access-7sgcd") pod "c1a1de47-f1f3-440c-bff6-d6299b56321a" (UID: "c1a1de47-f1f3-440c-bff6-d6299b56321a"). InnerVolumeSpecName "kube-api-access-7sgcd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.220301 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9nbz" event={"ID":"5b6afb3f-3d0e-4c54-8fc9-c112742976cc","Type":"ContainerStarted","Data":"a7e5f1818f749cdf17a3dd23b3e5ffea22b4d656dbc1b4f2214c2e2e1a37b147"} Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.222842 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx7qv" event={"ID":"8304b75b-b8b5-40c2-a63a-7ce2e5e2db54","Type":"ContainerStarted","Data":"c3f4b4e44949092f95cc9fda6c45132e1ce8aa2d3a5a5bea62d13da744e2d2cd"} Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.226825 5017 generic.go:334] "Generic (PLEG): container finished" podID="51b300c4-69d8-49d7-a5e4-ec3054ceed30" containerID="9bb14a85d9c57c3c68305746f33eb6063460f04dd8567e8d37354718e6c87b29" exitCode=0 Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.226913 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkvz7" event={"ID":"51b300c4-69d8-49d7-a5e4-ec3054ceed30","Type":"ContainerDied","Data":"9bb14a85d9c57c3c68305746f33eb6063460f04dd8567e8d37354718e6c87b29"} Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.229343 5017 generic.go:334] "Generic (PLEG): container finished" podID="c1a1de47-f1f3-440c-bff6-d6299b56321a" containerID="c56e4034bece0df26a19cebe1d687446d48b5fc73bae8bb3c20342e5a21b49aa" exitCode=0 Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.229447 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.229458 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" event={"ID":"c1a1de47-f1f3-440c-bff6-d6299b56321a","Type":"ContainerDied","Data":"c56e4034bece0df26a19cebe1d687446d48b5fc73bae8bb3c20342e5a21b49aa"} Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.229602 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl" event={"ID":"c1a1de47-f1f3-440c-bff6-d6299b56321a","Type":"ContainerDied","Data":"a10316599d370052daabc4d367f508ed5cca0b907250166fca2e6855e25fe262"} Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.229634 5017 scope.go:117] "RemoveContainer" containerID="c56e4034bece0df26a19cebe1d687446d48b5fc73bae8bb3c20342e5a21b49aa" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.237319 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h9dv2" event={"ID":"c820f9cc-89d7-40d4-9113-8ac2982a3bbf","Type":"ContainerStarted","Data":"25c21c0031440a32caa61981641979d52fbb41b624274cd02e47d8546cbff1b1"} Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.243948 5017 generic.go:334] "Generic (PLEG): container finished" podID="65614518-ced0-4d7b-86f0-2ae04bbcb58a" containerID="df596cc59b9c3a3e1d70448433204983bc9a4121bf6879f6e2b5e1fd74865477" exitCode=0 Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.244040 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c5j96" event={"ID":"65614518-ced0-4d7b-86f0-2ae04bbcb58a","Type":"ContainerDied","Data":"df596cc59b9c3a3e1d70448433204983bc9a4121bf6879f6e2b5e1fd74865477"} Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.248280 5017 generic.go:334] "Generic (PLEG): container finished" podID="a19914bd-125c-4066-9c3e-7b21b50a34c9" containerID="99b8882b01bc3bed15eeec8f1224e3d8e1c06abec6b619dfcb4ca0ccd0031b99" exitCode=0 Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.248329 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" event={"ID":"a19914bd-125c-4066-9c3e-7b21b50a34c9","Type":"ContainerDied","Data":"99b8882b01bc3bed15eeec8f1224e3d8e1c06abec6b619dfcb4ca0ccd0031b99"} Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.248361 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" event={"ID":"a19914bd-125c-4066-9c3e-7b21b50a34c9","Type":"ContainerDied","Data":"6d3559834d933c264b2e68a3ac89a48f0ff673e99f8fa820fb9e3411b488c4b0"} Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.248421 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.249329 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-s9nbz" podStartSLOduration=3.91936639 podStartE2EDuration="58.249309654s" podCreationTimestamp="2026-01-22 14:05:29 +0000 UTC" firstStartedPulling="2026-01-22 14:05:32.587626634 +0000 UTC m=+147.580088492" lastFinishedPulling="2026-01-22 14:06:26.917569898 +0000 UTC m=+201.910031756" observedRunningTime="2026-01-22 14:06:27.246505321 +0000 UTC m=+202.238967179" watchObservedRunningTime="2026-01-22 14:06:27.249309654 +0000 UTC m=+202.241771512" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.257139 5017 scope.go:117] "RemoveContainer" containerID="c56e4034bece0df26a19cebe1d687446d48b5fc73bae8bb3c20342e5a21b49aa" Jan 22 14:06:27 crc kubenswrapper[5017]: E0122 14:06:27.258511 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c56e4034bece0df26a19cebe1d687446d48b5fc73bae8bb3c20342e5a21b49aa\": container with ID starting with c56e4034bece0df26a19cebe1d687446d48b5fc73bae8bb3c20342e5a21b49aa not found: ID does not exist" containerID="c56e4034bece0df26a19cebe1d687446d48b5fc73bae8bb3c20342e5a21b49aa" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.258569 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c56e4034bece0df26a19cebe1d687446d48b5fc73bae8bb3c20342e5a21b49aa"} err="failed to get container status \"c56e4034bece0df26a19cebe1d687446d48b5fc73bae8bb3c20342e5a21b49aa\": rpc error: code = NotFound desc = could not find container \"c56e4034bece0df26a19cebe1d687446d48b5fc73bae8bb3c20342e5a21b49aa\": container with ID starting with c56e4034bece0df26a19cebe1d687446d48b5fc73bae8bb3c20342e5a21b49aa not found: ID does not exist" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.258649 5017 scope.go:117] "RemoveContainer" containerID="99b8882b01bc3bed15eeec8f1224e3d8e1c06abec6b619dfcb4ca0ccd0031b99" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.288210 5017 scope.go:117] "RemoveContainer" containerID="99b8882b01bc3bed15eeec8f1224e3d8e1c06abec6b619dfcb4ca0ccd0031b99" Jan 22 14:06:27 crc kubenswrapper[5017]: E0122 14:06:27.288721 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99b8882b01bc3bed15eeec8f1224e3d8e1c06abec6b619dfcb4ca0ccd0031b99\": container with ID starting with 99b8882b01bc3bed15eeec8f1224e3d8e1c06abec6b619dfcb4ca0ccd0031b99 not found: ID does not exist" containerID="99b8882b01bc3bed15eeec8f1224e3d8e1c06abec6b619dfcb4ca0ccd0031b99" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.288752 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99b8882b01bc3bed15eeec8f1224e3d8e1c06abec6b619dfcb4ca0ccd0031b99"} err="failed to get container status \"99b8882b01bc3bed15eeec8f1224e3d8e1c06abec6b619dfcb4ca0ccd0031b99\": rpc error: code = NotFound desc = could not find container \"99b8882b01bc3bed15eeec8f1224e3d8e1c06abec6b619dfcb4ca0ccd0031b99\": container with ID starting with 99b8882b01bc3bed15eeec8f1224e3d8e1c06abec6b619dfcb4ca0ccd0031b99 not found: ID does not exist" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.304042 5017 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/c1a1de47-f1f3-440c-bff6-d6299b56321a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.304131 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7sgcd\" (UniqueName: \"kubernetes.io/projected/c1a1de47-f1f3-440c-bff6-d6299b56321a-kube-api-access-7sgcd\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.338406 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-h9dv2" podStartSLOduration=2.206561725 podStartE2EDuration="56.33838918s" podCreationTimestamp="2026-01-22 14:05:31 +0000 UTC" firstStartedPulling="2026-01-22 14:05:32.586254065 +0000 UTC m=+147.578715923" lastFinishedPulling="2026-01-22 14:06:26.71808153 +0000 UTC m=+201.710543378" observedRunningTime="2026-01-22 14:06:27.337928126 +0000 UTC m=+202.330389984" watchObservedRunningTime="2026-01-22 14:06:27.33838918 +0000 UTC m=+202.330851038" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.348808 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl"] Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.353986 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-66ff769677-xznpl"] Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.409416 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv"] Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.416353 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv"] Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.561518 5017 patch_prober.go:28] interesting pod/controller-manager-5f74fdcb55-wm9dv container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.54:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.561622 5017 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-5f74fdcb55-wm9dv" podUID="a19914bd-125c-4066-9c3e-7b21b50a34c9" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.54:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.656244 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-557c58b94b-g9tvg"] Jan 22 14:06:27 crc kubenswrapper[5017]: E0122 14:06:27.656654 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a19914bd-125c-4066-9c3e-7b21b50a34c9" containerName="controller-manager" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.656680 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="a19914bd-125c-4066-9c3e-7b21b50a34c9" containerName="controller-manager" Jan 22 14:06:27 crc kubenswrapper[5017]: E0122 14:06:27.656712 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1a1de47-f1f3-440c-bff6-d6299b56321a" containerName="route-controller-manager" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.656723 5017 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="c1a1de47-f1f3-440c-bff6-d6299b56321a" containerName="route-controller-manager" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.656863 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1a1de47-f1f3-440c-bff6-d6299b56321a" containerName="route-controller-manager" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.656883 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="a19914bd-125c-4066-9c3e-7b21b50a34c9" containerName="controller-manager" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.657466 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.659067 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr"] Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.659832 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.660907 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.661260 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.661465 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.662060 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.662183 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.662126 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.662140 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.667453 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.668302 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.668525 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.670528 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.670732 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.723987 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-controller-manager/controller-manager-557c58b94b-g9tvg"] Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.725083 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.728025 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr"] Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.822052 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/296a5394-adf8-4d59-9734-724d6d628e88-serving-cert\") pod \"route-controller-manager-fcdcfd659-prcdr\" (UID: \"296a5394-adf8-4d59-9734-724d6d628e88\") " pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.822125 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51689a15-199a-4ffd-96ad-927b044f222b-config\") pod \"controller-manager-557c58b94b-g9tvg\" (UID: \"51689a15-199a-4ffd-96ad-927b044f222b\") " pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.822226 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/51689a15-199a-4ffd-96ad-927b044f222b-serving-cert\") pod \"controller-manager-557c58b94b-g9tvg\" (UID: \"51689a15-199a-4ffd-96ad-927b044f222b\") " pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.822314 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/51689a15-199a-4ffd-96ad-927b044f222b-client-ca\") pod \"controller-manager-557c58b94b-g9tvg\" (UID: \"51689a15-199a-4ffd-96ad-927b044f222b\") " pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.822348 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bznvl\" (UniqueName: \"kubernetes.io/projected/296a5394-adf8-4d59-9734-724d6d628e88-kube-api-access-bznvl\") pod \"route-controller-manager-fcdcfd659-prcdr\" (UID: \"296a5394-adf8-4d59-9734-724d6d628e88\") " pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.822726 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/296a5394-adf8-4d59-9734-724d6d628e88-config\") pod \"route-controller-manager-fcdcfd659-prcdr\" (UID: \"296a5394-adf8-4d59-9734-724d6d628e88\") " pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.822755 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbvmb\" (UniqueName: \"kubernetes.io/projected/51689a15-199a-4ffd-96ad-927b044f222b-kube-api-access-dbvmb\") pod \"controller-manager-557c58b94b-g9tvg\" (UID: \"51689a15-199a-4ffd-96ad-927b044f222b\") " 
pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.822774 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/51689a15-199a-4ffd-96ad-927b044f222b-proxy-ca-bundles\") pod \"controller-manager-557c58b94b-g9tvg\" (UID: \"51689a15-199a-4ffd-96ad-927b044f222b\") " pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.822972 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/296a5394-adf8-4d59-9734-724d6d628e88-client-ca\") pod \"route-controller-manager-fcdcfd659-prcdr\" (UID: \"296a5394-adf8-4d59-9734-724d6d628e88\") " pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.924066 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/51689a15-199a-4ffd-96ad-927b044f222b-client-ca\") pod \"controller-manager-557c58b94b-g9tvg\" (UID: \"51689a15-199a-4ffd-96ad-927b044f222b\") " pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.924141 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bznvl\" (UniqueName: \"kubernetes.io/projected/296a5394-adf8-4d59-9734-724d6d628e88-kube-api-access-bznvl\") pod \"route-controller-manager-fcdcfd659-prcdr\" (UID: \"296a5394-adf8-4d59-9734-724d6d628e88\") " pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.924212 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/296a5394-adf8-4d59-9734-724d6d628e88-config\") pod \"route-controller-manager-fcdcfd659-prcdr\" (UID: \"296a5394-adf8-4d59-9734-724d6d628e88\") " pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.924236 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbvmb\" (UniqueName: \"kubernetes.io/projected/51689a15-199a-4ffd-96ad-927b044f222b-kube-api-access-dbvmb\") pod \"controller-manager-557c58b94b-g9tvg\" (UID: \"51689a15-199a-4ffd-96ad-927b044f222b\") " pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.924251 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/51689a15-199a-4ffd-96ad-927b044f222b-proxy-ca-bundles\") pod \"controller-manager-557c58b94b-g9tvg\" (UID: \"51689a15-199a-4ffd-96ad-927b044f222b\") " pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.924284 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/296a5394-adf8-4d59-9734-724d6d628e88-client-ca\") pod \"route-controller-manager-fcdcfd659-prcdr\" (UID: \"296a5394-adf8-4d59-9734-724d6d628e88\") " pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" Jan 22 14:06:27 
crc kubenswrapper[5017]: I0122 14:06:27.924299 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/296a5394-adf8-4d59-9734-724d6d628e88-serving-cert\") pod \"route-controller-manager-fcdcfd659-prcdr\" (UID: \"296a5394-adf8-4d59-9734-724d6d628e88\") " pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.924319 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51689a15-199a-4ffd-96ad-927b044f222b-config\") pod \"controller-manager-557c58b94b-g9tvg\" (UID: \"51689a15-199a-4ffd-96ad-927b044f222b\") " pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.924338 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/51689a15-199a-4ffd-96ad-927b044f222b-serving-cert\") pod \"controller-manager-557c58b94b-g9tvg\" (UID: \"51689a15-199a-4ffd-96ad-927b044f222b\") " pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.926279 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/296a5394-adf8-4d59-9734-724d6d628e88-client-ca\") pod \"route-controller-manager-fcdcfd659-prcdr\" (UID: \"296a5394-adf8-4d59-9734-724d6d628e88\") " pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.926630 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/296a5394-adf8-4d59-9734-724d6d628e88-config\") pod \"route-controller-manager-fcdcfd659-prcdr\" (UID: \"296a5394-adf8-4d59-9734-724d6d628e88\") " pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.927465 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51689a15-199a-4ffd-96ad-927b044f222b-config\") pod \"controller-manager-557c58b94b-g9tvg\" (UID: \"51689a15-199a-4ffd-96ad-927b044f222b\") " pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.928086 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/51689a15-199a-4ffd-96ad-927b044f222b-client-ca\") pod \"controller-manager-557c58b94b-g9tvg\" (UID: \"51689a15-199a-4ffd-96ad-927b044f222b\") " pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.928236 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/51689a15-199a-4ffd-96ad-927b044f222b-serving-cert\") pod \"controller-manager-557c58b94b-g9tvg\" (UID: \"51689a15-199a-4ffd-96ad-927b044f222b\") " pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.928765 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/51689a15-199a-4ffd-96ad-927b044f222b-proxy-ca-bundles\") pod 
\"controller-manager-557c58b94b-g9tvg\" (UID: \"51689a15-199a-4ffd-96ad-927b044f222b\") " pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.932778 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/296a5394-adf8-4d59-9734-724d6d628e88-serving-cert\") pod \"route-controller-manager-fcdcfd659-prcdr\" (UID: \"296a5394-adf8-4d59-9734-724d6d628e88\") " pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.948398 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bznvl\" (UniqueName: \"kubernetes.io/projected/296a5394-adf8-4d59-9734-724d6d628e88-kube-api-access-bznvl\") pod \"route-controller-manager-fcdcfd659-prcdr\" (UID: \"296a5394-adf8-4d59-9734-724d6d628e88\") " pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" Jan 22 14:06:27 crc kubenswrapper[5017]: I0122 14:06:27.948921 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbvmb\" (UniqueName: \"kubernetes.io/projected/51689a15-199a-4ffd-96ad-927b044f222b-kube-api-access-dbvmb\") pod \"controller-manager-557c58b94b-g9tvg\" (UID: \"51689a15-199a-4ffd-96ad-927b044f222b\") " pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" Jan 22 14:06:28 crc kubenswrapper[5017]: I0122 14:06:28.029353 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" Jan 22 14:06:28 crc kubenswrapper[5017]: I0122 14:06:28.045307 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" Jan 22 14:06:28 crc kubenswrapper[5017]: I0122 14:06:28.273862 5017 generic.go:334] "Generic (PLEG): container finished" podID="8304b75b-b8b5-40c2-a63a-7ce2e5e2db54" containerID="c3f4b4e44949092f95cc9fda6c45132e1ce8aa2d3a5a5bea62d13da744e2d2cd" exitCode=0 Jan 22 14:06:28 crc kubenswrapper[5017]: I0122 14:06:28.274055 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx7qv" event={"ID":"8304b75b-b8b5-40c2-a63a-7ce2e5e2db54","Type":"ContainerDied","Data":"c3f4b4e44949092f95cc9fda6c45132e1ce8aa2d3a5a5bea62d13da744e2d2cd"} Jan 22 14:06:28 crc kubenswrapper[5017]: I0122 14:06:28.325791 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr"] Jan 22 14:06:28 crc kubenswrapper[5017]: W0122 14:06:28.334705 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod296a5394_adf8_4d59_9734_724d6d628e88.slice/crio-db579b7148fb8e4ef02f1a61d9af6917487983ac303a23f615733e304352370d WatchSource:0}: Error finding container db579b7148fb8e4ef02f1a61d9af6917487983ac303a23f615733e304352370d: Status 404 returned error can't find the container with id db579b7148fb8e4ef02f1a61d9af6917487983ac303a23f615733e304352370d Jan 22 14:06:28 crc kubenswrapper[5017]: I0122 14:06:28.480976 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-557c58b94b-g9tvg"] Jan 22 14:06:28 crc kubenswrapper[5017]: W0122 14:06:28.493332 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51689a15_199a_4ffd_96ad_927b044f222b.slice/crio-ec78b87f66d7098cd097355239da0f514ac8cf85b48986fb0e0a724d089cc281 WatchSource:0}: Error finding container ec78b87f66d7098cd097355239da0f514ac8cf85b48986fb0e0a724d089cc281: Status 404 returned error can't find the container with id ec78b87f66d7098cd097355239da0f514ac8cf85b48986fb0e0a724d089cc281 Jan 22 14:06:29 crc kubenswrapper[5017]: I0122 14:06:29.271969 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a19914bd-125c-4066-9c3e-7b21b50a34c9" path="/var/lib/kubelet/pods/a19914bd-125c-4066-9c3e-7b21b50a34c9/volumes" Jan 22 14:06:29 crc kubenswrapper[5017]: I0122 14:06:29.273304 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1a1de47-f1f3-440c-bff6-d6299b56321a" path="/var/lib/kubelet/pods/c1a1de47-f1f3-440c-bff6-d6299b56321a/volumes" Jan 22 14:06:29 crc kubenswrapper[5017]: I0122 14:06:29.282771 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c5j96" event={"ID":"65614518-ced0-4d7b-86f0-2ae04bbcb58a","Type":"ContainerStarted","Data":"037b9c00f02213d27e525b04e5ba8bc120ab689ae87c99f53740eaca91826f4d"} Jan 22 14:06:29 crc kubenswrapper[5017]: I0122 14:06:29.284786 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9f9c6" event={"ID":"bffa30e4-e5f8-4888-9731-dfd3e4b74cd1","Type":"ContainerStarted","Data":"6e6890e671041b4e43f3fe9b6e3b43d3eefb91f0f566813f15c993747a6c813d"} Jan 22 14:06:29 crc kubenswrapper[5017]: I0122 14:06:29.286443 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v9nd6" 
event={"ID":"a773a1c5-5756-429a-950d-7e8b8c95341f","Type":"ContainerStarted","Data":"9c29fd852629b3fee495709faf68bab98393198a2d1ec7c97a4a489f8051ad99"} Jan 22 14:06:29 crc kubenswrapper[5017]: I0122 14:06:29.289016 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx7qv" event={"ID":"8304b75b-b8b5-40c2-a63a-7ce2e5e2db54","Type":"ContainerStarted","Data":"f73ae92f68ca1750068274b0689e3bdae8de16672af8771b203323a1ce172f40"} Jan 22 14:06:29 crc kubenswrapper[5017]: I0122 14:06:29.291067 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" event={"ID":"51689a15-199a-4ffd-96ad-927b044f222b","Type":"ContainerStarted","Data":"b1d746b33989ffb38841b346242bb1d457777dad04a25165c1d4eb987d25ed3c"} Jan 22 14:06:29 crc kubenswrapper[5017]: I0122 14:06:29.291105 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" event={"ID":"51689a15-199a-4ffd-96ad-927b044f222b","Type":"ContainerStarted","Data":"ec78b87f66d7098cd097355239da0f514ac8cf85b48986fb0e0a724d089cc281"} Jan 22 14:06:29 crc kubenswrapper[5017]: I0122 14:06:29.291894 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" Jan 22 14:06:29 crc kubenswrapper[5017]: I0122 14:06:29.294512 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkvz7" event={"ID":"51b300c4-69d8-49d7-a5e4-ec3054ceed30","Type":"ContainerStarted","Data":"486d6448ffbafc9847e83046b48670632ced71cf9734b4254b322c19b18ebf25"} Jan 22 14:06:29 crc kubenswrapper[5017]: I0122 14:06:29.296829 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" event={"ID":"296a5394-adf8-4d59-9734-724d6d628e88","Type":"ContainerStarted","Data":"198ac5ed5de09317b32d76467e18a57953d8cd6b1536771ee6e70c28a25566fa"} Jan 22 14:06:29 crc kubenswrapper[5017]: I0122 14:06:29.296870 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" event={"ID":"296a5394-adf8-4d59-9734-724d6d628e88","Type":"ContainerStarted","Data":"db579b7148fb8e4ef02f1a61d9af6917487983ac303a23f615733e304352370d"} Jan 22 14:06:29 crc kubenswrapper[5017]: I0122 14:06:29.297609 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" Jan 22 14:06:29 crc kubenswrapper[5017]: I0122 14:06:29.300280 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xhq9v" event={"ID":"194b80b4-3a8d-4530-8581-53cc45997936","Type":"ContainerStarted","Data":"4825ec292c4a5718f69e2c2a3f9f585aa369e6a2759c8229315f46c7d9b27e6c"} Jan 22 14:06:29 crc kubenswrapper[5017]: I0122 14:06:29.319538 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" Jan 22 14:06:29 crc kubenswrapper[5017]: I0122 14:06:29.323998 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-c5j96" podStartSLOduration=4.010763509 podStartE2EDuration="58.323981149s" podCreationTimestamp="2026-01-22 14:05:31 +0000 UTC" firstStartedPulling="2026-01-22 14:05:33.652382076 +0000 UTC m=+148.644843934" lastFinishedPulling="2026-01-22 
14:06:27.965599726 +0000 UTC m=+202.958061574" observedRunningTime="2026-01-22 14:06:29.322297969 +0000 UTC m=+204.314759827" watchObservedRunningTime="2026-01-22 14:06:29.323981149 +0000 UTC m=+204.316443008" Jan 22 14:06:29 crc kubenswrapper[5017]: I0122 14:06:29.387336 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" podStartSLOduration=3.387316385 podStartE2EDuration="3.387316385s" podCreationTimestamp="2026-01-22 14:06:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:06:29.385988205 +0000 UTC m=+204.378450063" watchObservedRunningTime="2026-01-22 14:06:29.387316385 +0000 UTC m=+204.379778243" Jan 22 14:06:29 crc kubenswrapper[5017]: I0122 14:06:29.476356 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jkvz7" podStartSLOduration=5.077235889 podStartE2EDuration="1m0.476334168s" podCreationTimestamp="2026-01-22 14:05:29 +0000 UTC" firstStartedPulling="2026-01-22 14:05:32.552348043 +0000 UTC m=+147.544809901" lastFinishedPulling="2026-01-22 14:06:27.951446322 +0000 UTC m=+202.943908180" observedRunningTime="2026-01-22 14:06:29.468674109 +0000 UTC m=+204.461135967" watchObservedRunningTime="2026-01-22 14:06:29.476334168 +0000 UTC m=+204.468796036" Jan 22 14:06:29 crc kubenswrapper[5017]: I0122 14:06:29.496829 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" podStartSLOduration=3.496809111 podStartE2EDuration="3.496809111s" podCreationTimestamp="2026-01-22 14:06:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:06:29.49177947 +0000 UTC m=+204.484241348" watchObservedRunningTime="2026-01-22 14:06:29.496809111 +0000 UTC m=+204.489270969" Jan 22 14:06:29 crc kubenswrapper[5017]: I0122 14:06:29.520656 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" Jan 22 14:06:29 crc kubenswrapper[5017]: I0122 14:06:29.525873 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rx7qv" podStartSLOduration=3.274271122 podStartE2EDuration="59.52585805s" podCreationTimestamp="2026-01-22 14:05:30 +0000 UTC" firstStartedPulling="2026-01-22 14:05:32.537885678 +0000 UTC m=+147.530347536" lastFinishedPulling="2026-01-22 14:06:28.789472606 +0000 UTC m=+203.781934464" observedRunningTime="2026-01-22 14:06:29.525387746 +0000 UTC m=+204.517849614" watchObservedRunningTime="2026-01-22 14:06:29.52585805 +0000 UTC m=+204.518319898" Jan 22 14:06:29 crc kubenswrapper[5017]: I0122 14:06:29.964354 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jkvz7" Jan 22 14:06:29 crc kubenswrapper[5017]: I0122 14:06:29.964436 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jkvz7" Jan 22 14:06:30 crc kubenswrapper[5017]: I0122 14:06:30.308529 5017 generic.go:334] "Generic (PLEG): container finished" podID="bffa30e4-e5f8-4888-9731-dfd3e4b74cd1" containerID="6e6890e671041b4e43f3fe9b6e3b43d3eefb91f0f566813f15c993747a6c813d" exitCode=0 Jan 22 14:06:30 crc 
kubenswrapper[5017]: I0122 14:06:30.309213 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9f9c6" event={"ID":"bffa30e4-e5f8-4888-9731-dfd3e4b74cd1","Type":"ContainerDied","Data":"6e6890e671041b4e43f3fe9b6e3b43d3eefb91f0f566813f15c993747a6c813d"} Jan 22 14:06:30 crc kubenswrapper[5017]: I0122 14:06:30.352715 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-s9nbz" Jan 22 14:06:30 crc kubenswrapper[5017]: I0122 14:06:30.352762 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-s9nbz" Jan 22 14:06:30 crc kubenswrapper[5017]: I0122 14:06:30.422071 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-s9nbz" Jan 22 14:06:30 crc kubenswrapper[5017]: I0122 14:06:30.533106 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rx7qv" Jan 22 14:06:30 crc kubenswrapper[5017]: I0122 14:06:30.533224 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rx7qv" Jan 22 14:06:31 crc kubenswrapper[5017]: I0122 14:06:31.072766 5017 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-jkvz7" podUID="51b300c4-69d8-49d7-a5e4-ec3054ceed30" containerName="registry-server" probeResult="failure" output=< Jan 22 14:06:31 crc kubenswrapper[5017]: timeout: failed to connect service ":50051" within 1s Jan 22 14:06:31 crc kubenswrapper[5017]: > Jan 22 14:06:31 crc kubenswrapper[5017]: I0122 14:06:31.317509 5017 generic.go:334] "Generic (PLEG): container finished" podID="194b80b4-3a8d-4530-8581-53cc45997936" containerID="4825ec292c4a5718f69e2c2a3f9f585aa369e6a2759c8229315f46c7d9b27e6c" exitCode=0 Jan 22 14:06:31 crc kubenswrapper[5017]: I0122 14:06:31.317584 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xhq9v" event={"ID":"194b80b4-3a8d-4530-8581-53cc45997936","Type":"ContainerDied","Data":"4825ec292c4a5718f69e2c2a3f9f585aa369e6a2759c8229315f46c7d9b27e6c"} Jan 22 14:06:31 crc kubenswrapper[5017]: I0122 14:06:31.320049 5017 generic.go:334] "Generic (PLEG): container finished" podID="a773a1c5-5756-429a-950d-7e8b8c95341f" containerID="9c29fd852629b3fee495709faf68bab98393198a2d1ec7c97a4a489f8051ad99" exitCode=0 Jan 22 14:06:31 crc kubenswrapper[5017]: I0122 14:06:31.320132 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v9nd6" event={"ID":"a773a1c5-5756-429a-950d-7e8b8c95341f","Type":"ContainerDied","Data":"9c29fd852629b3fee495709faf68bab98393198a2d1ec7c97a4a489f8051ad99"} Jan 22 14:06:31 crc kubenswrapper[5017]: I0122 14:06:31.569145 5017 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-rx7qv" podUID="8304b75b-b8b5-40c2-a63a-7ce2e5e2db54" containerName="registry-server" probeResult="failure" output=< Jan 22 14:06:31 crc kubenswrapper[5017]: timeout: failed to connect service ":50051" within 1s Jan 22 14:06:31 crc kubenswrapper[5017]: > Jan 22 14:06:31 crc kubenswrapper[5017]: I0122 14:06:31.883018 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-h9dv2" Jan 22 14:06:31 crc kubenswrapper[5017]: I0122 14:06:31.883078 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/redhat-marketplace-h9dv2" Jan 22 14:06:31 crc kubenswrapper[5017]: I0122 14:06:31.925727 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-h9dv2" Jan 22 14:06:32 crc kubenswrapper[5017]: I0122 14:06:32.358559 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-c5j96" Jan 22 14:06:32 crc kubenswrapper[5017]: I0122 14:06:32.358685 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-c5j96" Jan 22 14:06:32 crc kubenswrapper[5017]: I0122 14:06:32.367575 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-h9dv2" Jan 22 14:06:32 crc kubenswrapper[5017]: I0122 14:06:32.408721 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-c5j96" Jan 22 14:06:33 crc kubenswrapper[5017]: I0122 14:06:33.371590 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-c5j96" Jan 22 14:06:36 crc kubenswrapper[5017]: I0122 14:06:36.701014 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-c5j96"] Jan 22 14:06:36 crc kubenswrapper[5017]: I0122 14:06:36.701681 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-c5j96" podUID="65614518-ced0-4d7b-86f0-2ae04bbcb58a" containerName="registry-server" containerID="cri-o://037b9c00f02213d27e525b04e5ba8bc120ab689ae87c99f53740eaca91826f4d" gracePeriod=2 Jan 22 14:06:37 crc kubenswrapper[5017]: E0122 14:06:37.025672 5017 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod65614518_ced0_4d7b_86f0_2ae04bbcb58a.slice/crio-037b9c00f02213d27e525b04e5ba8bc120ab689ae87c99f53740eaca91826f4d.scope\": RecentStats: unable to find data in memory cache]" Jan 22 14:06:38 crc kubenswrapper[5017]: I0122 14:06:38.357106 5017 generic.go:334] "Generic (PLEG): container finished" podID="65614518-ced0-4d7b-86f0-2ae04bbcb58a" containerID="037b9c00f02213d27e525b04e5ba8bc120ab689ae87c99f53740eaca91826f4d" exitCode=0 Jan 22 14:06:38 crc kubenswrapper[5017]: I0122 14:06:38.357186 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c5j96" event={"ID":"65614518-ced0-4d7b-86f0-2ae04bbcb58a","Type":"ContainerDied","Data":"037b9c00f02213d27e525b04e5ba8bc120ab689ae87c99f53740eaca91826f4d"} Jan 22 14:06:38 crc kubenswrapper[5017]: I0122 14:06:38.884010 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c5j96" Jan 22 14:06:39 crc kubenswrapper[5017]: I0122 14:06:39.074367 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65614518-ced0-4d7b-86f0-2ae04bbcb58a-utilities\") pod \"65614518-ced0-4d7b-86f0-2ae04bbcb58a\" (UID: \"65614518-ced0-4d7b-86f0-2ae04bbcb58a\") " Jan 22 14:06:39 crc kubenswrapper[5017]: I0122 14:06:39.074470 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65614518-ced0-4d7b-86f0-2ae04bbcb58a-catalog-content\") pod \"65614518-ced0-4d7b-86f0-2ae04bbcb58a\" (UID: \"65614518-ced0-4d7b-86f0-2ae04bbcb58a\") " Jan 22 14:06:39 crc kubenswrapper[5017]: I0122 14:06:39.074511 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqj6x\" (UniqueName: \"kubernetes.io/projected/65614518-ced0-4d7b-86f0-2ae04bbcb58a-kube-api-access-kqj6x\") pod \"65614518-ced0-4d7b-86f0-2ae04bbcb58a\" (UID: \"65614518-ced0-4d7b-86f0-2ae04bbcb58a\") " Jan 22 14:06:39 crc kubenswrapper[5017]: I0122 14:06:39.075546 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65614518-ced0-4d7b-86f0-2ae04bbcb58a-utilities" (OuterVolumeSpecName: "utilities") pod "65614518-ced0-4d7b-86f0-2ae04bbcb58a" (UID: "65614518-ced0-4d7b-86f0-2ae04bbcb58a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:06:39 crc kubenswrapper[5017]: I0122 14:06:39.079629 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65614518-ced0-4d7b-86f0-2ae04bbcb58a-kube-api-access-kqj6x" (OuterVolumeSpecName: "kube-api-access-kqj6x") pod "65614518-ced0-4d7b-86f0-2ae04bbcb58a" (UID: "65614518-ced0-4d7b-86f0-2ae04bbcb58a"). InnerVolumeSpecName "kube-api-access-kqj6x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:06:39 crc kubenswrapper[5017]: I0122 14:06:39.111181 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65614518-ced0-4d7b-86f0-2ae04bbcb58a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "65614518-ced0-4d7b-86f0-2ae04bbcb58a" (UID: "65614518-ced0-4d7b-86f0-2ae04bbcb58a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:06:39 crc kubenswrapper[5017]: I0122 14:06:39.176352 5017 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65614518-ced0-4d7b-86f0-2ae04bbcb58a-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:39 crc kubenswrapper[5017]: I0122 14:06:39.176426 5017 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65614518-ced0-4d7b-86f0-2ae04bbcb58a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:39 crc kubenswrapper[5017]: I0122 14:06:39.176465 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqj6x\" (UniqueName: \"kubernetes.io/projected/65614518-ced0-4d7b-86f0-2ae04bbcb58a-kube-api-access-kqj6x\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:39 crc kubenswrapper[5017]: I0122 14:06:39.364970 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c5j96" event={"ID":"65614518-ced0-4d7b-86f0-2ae04bbcb58a","Type":"ContainerDied","Data":"07dbbea1e35ea057ec169009dd44b6eaac139440c13f4067052cc411a390b4dc"} Jan 22 14:06:39 crc kubenswrapper[5017]: I0122 14:06:39.365023 5017 scope.go:117] "RemoveContainer" containerID="037b9c00f02213d27e525b04e5ba8bc120ab689ae87c99f53740eaca91826f4d" Jan 22 14:06:39 crc kubenswrapper[5017]: I0122 14:06:39.365062 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c5j96" Jan 22 14:06:39 crc kubenswrapper[5017]: I0122 14:06:39.383937 5017 scope.go:117] "RemoveContainer" containerID="df596cc59b9c3a3e1d70448433204983bc9a4121bf6879f6e2b5e1fd74865477" Jan 22 14:06:39 crc kubenswrapper[5017]: I0122 14:06:39.386090 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-c5j96"] Jan 22 14:06:39 crc kubenswrapper[5017]: I0122 14:06:39.390276 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-c5j96"] Jan 22 14:06:39 crc kubenswrapper[5017]: I0122 14:06:39.398726 5017 scope.go:117] "RemoveContainer" containerID="5aaf14bbc09c0158447fa7fd65598ff6d6460ec8318584da72d8c26103bfa1c3" Jan 22 14:06:40 crc kubenswrapper[5017]: I0122 14:06:40.003997 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jkvz7" Jan 22 14:06:40 crc kubenswrapper[5017]: I0122 14:06:40.044774 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jkvz7" Jan 22 14:06:40 crc kubenswrapper[5017]: I0122 14:06:40.377188 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9f9c6" event={"ID":"bffa30e4-e5f8-4888-9731-dfd3e4b74cd1","Type":"ContainerStarted","Data":"8e01a40c373977d03ee296b0082e06c8219261098287df1b4315938b24e1834d"} Jan 22 14:06:40 crc kubenswrapper[5017]: I0122 14:06:40.406611 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9f9c6" podStartSLOduration=3.983588001 podStartE2EDuration="1m7.406591878s" podCreationTimestamp="2026-01-22 14:05:33 +0000 UTC" firstStartedPulling="2026-01-22 14:05:34.684954225 +0000 UTC m=+149.677416083" lastFinishedPulling="2026-01-22 14:06:38.107958112 +0000 UTC m=+213.100419960" observedRunningTime="2026-01-22 14:06:40.401567568 +0000 UTC m=+215.394029426" watchObservedRunningTime="2026-01-22 
14:06:40.406591878 +0000 UTC m=+215.399053736" Jan 22 14:06:40 crc kubenswrapper[5017]: I0122 14:06:40.411662 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-s9nbz" Jan 22 14:06:40 crc kubenswrapper[5017]: I0122 14:06:40.592221 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rx7qv" Jan 22 14:06:40 crc kubenswrapper[5017]: I0122 14:06:40.634227 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rx7qv" Jan 22 14:06:41 crc kubenswrapper[5017]: I0122 14:06:41.264788 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65614518-ced0-4d7b-86f0-2ae04bbcb58a" path="/var/lib/kubelet/pods/65614518-ced0-4d7b-86f0-2ae04bbcb58a/volumes" Jan 22 14:06:42 crc kubenswrapper[5017]: I0122 14:06:42.299534 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s9nbz"] Jan 22 14:06:42 crc kubenswrapper[5017]: I0122 14:06:42.299794 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-s9nbz" podUID="5b6afb3f-3d0e-4c54-8fc9-c112742976cc" containerName="registry-server" containerID="cri-o://a7e5f1818f749cdf17a3dd23b3e5ffea22b4d656dbc1b4f2214c2e2e1a37b147" gracePeriod=2 Jan 22 14:06:42 crc kubenswrapper[5017]: I0122 14:06:42.391066 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v9nd6" event={"ID":"a773a1c5-5756-429a-950d-7e8b8c95341f","Type":"ContainerStarted","Data":"a2100c38a29c0b77cf8dda8d3ee36c76953299ba4465a2da718bd791e957d18f"} Jan 22 14:06:42 crc kubenswrapper[5017]: I0122 14:06:42.412471 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-v9nd6" podStartSLOduration=5.195518916 podStartE2EDuration="1m13.412450284s" podCreationTimestamp="2026-01-22 14:05:29 +0000 UTC" firstStartedPulling="2026-01-22 14:05:32.528679234 +0000 UTC m=+147.521141092" lastFinishedPulling="2026-01-22 14:06:40.745610602 +0000 UTC m=+215.738072460" observedRunningTime="2026-01-22 14:06:42.40828161 +0000 UTC m=+217.400743488" watchObservedRunningTime="2026-01-22 14:06:42.412450284 +0000 UTC m=+217.404912142" Jan 22 14:06:43 crc kubenswrapper[5017]: I0122 14:06:43.400344 5017 generic.go:334] "Generic (PLEG): container finished" podID="5b6afb3f-3d0e-4c54-8fc9-c112742976cc" containerID="a7e5f1818f749cdf17a3dd23b3e5ffea22b4d656dbc1b4f2214c2e2e1a37b147" exitCode=0 Jan 22 14:06:43 crc kubenswrapper[5017]: I0122 14:06:43.400423 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9nbz" event={"ID":"5b6afb3f-3d0e-4c54-8fc9-c112742976cc","Type":"ContainerDied","Data":"a7e5f1818f749cdf17a3dd23b3e5ffea22b4d656dbc1b4f2214c2e2e1a37b147"} Jan 22 14:06:43 crc kubenswrapper[5017]: I0122 14:06:43.685695 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9f9c6" Jan 22 14:06:43 crc kubenswrapper[5017]: I0122 14:06:43.685748 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9f9c6" Jan 22 14:06:44 crc kubenswrapper[5017]: I0122 14:06:44.700443 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rx7qv"] Jan 22 14:06:44 crc kubenswrapper[5017]: I0122 
14:06:44.700693 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rx7qv" podUID="8304b75b-b8b5-40c2-a63a-7ce2e5e2db54" containerName="registry-server" containerID="cri-o://f73ae92f68ca1750068274b0689e3bdae8de16672af8771b203323a1ce172f40" gracePeriod=2 Jan 22 14:06:44 crc kubenswrapper[5017]: I0122 14:06:44.725411 5017 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-9f9c6" podUID="bffa30e4-e5f8-4888-9731-dfd3e4b74cd1" containerName="registry-server" probeResult="failure" output=< Jan 22 14:06:44 crc kubenswrapper[5017]: timeout: failed to connect service ":50051" within 1s Jan 22 14:06:44 crc kubenswrapper[5017]: > Jan 22 14:06:46 crc kubenswrapper[5017]: I0122 14:06:46.276223 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-557c58b94b-g9tvg"] Jan 22 14:06:46 crc kubenswrapper[5017]: I0122 14:06:46.277141 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" podUID="51689a15-199a-4ffd-96ad-927b044f222b" containerName="controller-manager" containerID="cri-o://b1d746b33989ffb38841b346242bb1d457777dad04a25165c1d4eb987d25ed3c" gracePeriod=30 Jan 22 14:06:46 crc kubenswrapper[5017]: I0122 14:06:46.370694 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr"] Jan 22 14:06:46 crc kubenswrapper[5017]: I0122 14:06:46.370896 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" podUID="296a5394-adf8-4d59-9734-724d6d628e88" containerName="route-controller-manager" containerID="cri-o://198ac5ed5de09317b32d76467e18a57953d8cd6b1536771ee6e70c28a25566fa" gracePeriod=30 Jan 22 14:06:46 crc kubenswrapper[5017]: I0122 14:06:46.414977 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xhq9v" event={"ID":"194b80b4-3a8d-4530-8581-53cc45997936","Type":"ContainerStarted","Data":"95dc172f8a13fbbe2eaad709a5d16d418f917d5c7375e7e6d5b4287ae404f5b2"} Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.378371 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s9nbz" Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.425350 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9nbz" event={"ID":"5b6afb3f-3d0e-4c54-8fc9-c112742976cc","Type":"ContainerDied","Data":"c416cf00c1622d0b12ddd66c77e0b5771f916c2150c3f76ddd697a6d93783cf7"} Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.425393 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-s9nbz" Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.425473 5017 scope.go:117] "RemoveContainer" containerID="a7e5f1818f749cdf17a3dd23b3e5ffea22b4d656dbc1b4f2214c2e2e1a37b147" Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.429341 5017 generic.go:334] "Generic (PLEG): container finished" podID="8304b75b-b8b5-40c2-a63a-7ce2e5e2db54" containerID="f73ae92f68ca1750068274b0689e3bdae8de16672af8771b203323a1ce172f40" exitCode=0 Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.429429 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx7qv" event={"ID":"8304b75b-b8b5-40c2-a63a-7ce2e5e2db54","Type":"ContainerDied","Data":"f73ae92f68ca1750068274b0689e3bdae8de16672af8771b203323a1ce172f40"} Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.431296 5017 generic.go:334] "Generic (PLEG): container finished" podID="51689a15-199a-4ffd-96ad-927b044f222b" containerID="b1d746b33989ffb38841b346242bb1d457777dad04a25165c1d4eb987d25ed3c" exitCode=0 Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.431358 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" event={"ID":"51689a15-199a-4ffd-96ad-927b044f222b","Type":"ContainerDied","Data":"b1d746b33989ffb38841b346242bb1d457777dad04a25165c1d4eb987d25ed3c"} Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.433284 5017 generic.go:334] "Generic (PLEG): container finished" podID="296a5394-adf8-4d59-9734-724d6d628e88" containerID="198ac5ed5de09317b32d76467e18a57953d8cd6b1536771ee6e70c28a25566fa" exitCode=0 Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.433531 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" event={"ID":"296a5394-adf8-4d59-9734-724d6d628e88","Type":"ContainerDied","Data":"198ac5ed5de09317b32d76467e18a57953d8cd6b1536771ee6e70c28a25566fa"} Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.442571 5017 scope.go:117] "RemoveContainer" containerID="acf1b163b9ec984ccd8511601421e294ba63dcf0bad3311b128e7188c59969a3" Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.460465 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xhq9v" podStartSLOduration=6.680643719 podStartE2EDuration="1m15.460442684s" podCreationTimestamp="2026-01-22 14:05:32 +0000 UTC" firstStartedPulling="2026-01-22 14:05:34.700414528 +0000 UTC m=+149.692876386" lastFinishedPulling="2026-01-22 14:06:43.480213493 +0000 UTC m=+218.472675351" observedRunningTime="2026-01-22 14:06:47.452940259 +0000 UTC m=+222.445402117" watchObservedRunningTime="2026-01-22 14:06:47.460442684 +0000 UTC m=+222.452904552" Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.465125 5017 scope.go:117] "RemoveContainer" containerID="0b2653fdcc3e9b0ded9218ac25fd5680b692243f9ee52348b070d7ae725fb5f0" Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.479826 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2929\" (UniqueName: \"kubernetes.io/projected/5b6afb3f-3d0e-4c54-8fc9-c112742976cc-kube-api-access-k2929\") pod \"5b6afb3f-3d0e-4c54-8fc9-c112742976cc\" (UID: \"5b6afb3f-3d0e-4c54-8fc9-c112742976cc\") " Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.479903 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b6afb3f-3d0e-4c54-8fc9-c112742976cc-catalog-content\") pod \"5b6afb3f-3d0e-4c54-8fc9-c112742976cc\" (UID: \"5b6afb3f-3d0e-4c54-8fc9-c112742976cc\") " Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.479947 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b6afb3f-3d0e-4c54-8fc9-c112742976cc-utilities\") pod \"5b6afb3f-3d0e-4c54-8fc9-c112742976cc\" (UID: \"5b6afb3f-3d0e-4c54-8fc9-c112742976cc\") " Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.480938 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b6afb3f-3d0e-4c54-8fc9-c112742976cc-utilities" (OuterVolumeSpecName: "utilities") pod "5b6afb3f-3d0e-4c54-8fc9-c112742976cc" (UID: "5b6afb3f-3d0e-4c54-8fc9-c112742976cc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.485129 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b6afb3f-3d0e-4c54-8fc9-c112742976cc-kube-api-access-k2929" (OuterVolumeSpecName: "kube-api-access-k2929") pod "5b6afb3f-3d0e-4c54-8fc9-c112742976cc" (UID: "5b6afb3f-3d0e-4c54-8fc9-c112742976cc"). InnerVolumeSpecName "kube-api-access-k2929". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.531840 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b6afb3f-3d0e-4c54-8fc9-c112742976cc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5b6afb3f-3d0e-4c54-8fc9-c112742976cc" (UID: "5b6afb3f-3d0e-4c54-8fc9-c112742976cc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.581397 5017 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b6afb3f-3d0e-4c54-8fc9-c112742976cc-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.581432 5017 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b6afb3f-3d0e-4c54-8fc9-c112742976cc-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.581443 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2929\" (UniqueName: \"kubernetes.io/projected/5b6afb3f-3d0e-4c54-8fc9-c112742976cc-kube-api-access-k2929\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.767266 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s9nbz"] Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.769917 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-s9nbz"] Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.946038 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rx7qv" Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.981150 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" Jan 22 14:06:47 crc kubenswrapper[5017]: I0122 14:06:47.990365 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.088922 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51689a15-199a-4ffd-96ad-927b044f222b-config\") pod \"51689a15-199a-4ffd-96ad-927b044f222b\" (UID: \"51689a15-199a-4ffd-96ad-927b044f222b\") " Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.088988 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nsx6c\" (UniqueName: \"kubernetes.io/projected/8304b75b-b8b5-40c2-a63a-7ce2e5e2db54-kube-api-access-nsx6c\") pod \"8304b75b-b8b5-40c2-a63a-7ce2e5e2db54\" (UID: \"8304b75b-b8b5-40c2-a63a-7ce2e5e2db54\") " Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.089018 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/51689a15-199a-4ffd-96ad-927b044f222b-client-ca\") pod \"51689a15-199a-4ffd-96ad-927b044f222b\" (UID: \"51689a15-199a-4ffd-96ad-927b044f222b\") " Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.089047 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/51689a15-199a-4ffd-96ad-927b044f222b-proxy-ca-bundles\") pod \"51689a15-199a-4ffd-96ad-927b044f222b\" (UID: \"51689a15-199a-4ffd-96ad-927b044f222b\") " Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.089083 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8304b75b-b8b5-40c2-a63a-7ce2e5e2db54-utilities\") pod \"8304b75b-b8b5-40c2-a63a-7ce2e5e2db54\" (UID: \"8304b75b-b8b5-40c2-a63a-7ce2e5e2db54\") " Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.089171 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/51689a15-199a-4ffd-96ad-927b044f222b-serving-cert\") pod \"51689a15-199a-4ffd-96ad-927b044f222b\" (UID: \"51689a15-199a-4ffd-96ad-927b044f222b\") " Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.089226 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bznvl\" (UniqueName: \"kubernetes.io/projected/296a5394-adf8-4d59-9734-724d6d628e88-kube-api-access-bznvl\") pod \"296a5394-adf8-4d59-9734-724d6d628e88\" (UID: \"296a5394-adf8-4d59-9734-724d6d628e88\") " Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.089257 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8304b75b-b8b5-40c2-a63a-7ce2e5e2db54-catalog-content\") pod \"8304b75b-b8b5-40c2-a63a-7ce2e5e2db54\" (UID: \"8304b75b-b8b5-40c2-a63a-7ce2e5e2db54\") " Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.089288 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/296a5394-adf8-4d59-9734-724d6d628e88-config\") pod \"296a5394-adf8-4d59-9734-724d6d628e88\" (UID: \"296a5394-adf8-4d59-9734-724d6d628e88\") " Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.089312 5017 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbvmb\" (UniqueName: \"kubernetes.io/projected/51689a15-199a-4ffd-96ad-927b044f222b-kube-api-access-dbvmb\") pod \"51689a15-199a-4ffd-96ad-927b044f222b\" (UID: \"51689a15-199a-4ffd-96ad-927b044f222b\") " Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.090282 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8304b75b-b8b5-40c2-a63a-7ce2e5e2db54-utilities" (OuterVolumeSpecName: "utilities") pod "8304b75b-b8b5-40c2-a63a-7ce2e5e2db54" (UID: "8304b75b-b8b5-40c2-a63a-7ce2e5e2db54"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.090668 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51689a15-199a-4ffd-96ad-927b044f222b-client-ca" (OuterVolumeSpecName: "client-ca") pod "51689a15-199a-4ffd-96ad-927b044f222b" (UID: "51689a15-199a-4ffd-96ad-927b044f222b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.091071 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51689a15-199a-4ffd-96ad-927b044f222b-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "51689a15-199a-4ffd-96ad-927b044f222b" (UID: "51689a15-199a-4ffd-96ad-927b044f222b"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.091438 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51689a15-199a-4ffd-96ad-927b044f222b-config" (OuterVolumeSpecName: "config") pod "51689a15-199a-4ffd-96ad-927b044f222b" (UID: "51689a15-199a-4ffd-96ad-927b044f222b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.091476 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/296a5394-adf8-4d59-9734-724d6d628e88-config" (OuterVolumeSpecName: "config") pod "296a5394-adf8-4d59-9734-724d6d628e88" (UID: "296a5394-adf8-4d59-9734-724d6d628e88"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.093289 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8304b75b-b8b5-40c2-a63a-7ce2e5e2db54-kube-api-access-nsx6c" (OuterVolumeSpecName: "kube-api-access-nsx6c") pod "8304b75b-b8b5-40c2-a63a-7ce2e5e2db54" (UID: "8304b75b-b8b5-40c2-a63a-7ce2e5e2db54"). InnerVolumeSpecName "kube-api-access-nsx6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.101307 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/296a5394-adf8-4d59-9734-724d6d628e88-kube-api-access-bznvl" (OuterVolumeSpecName: "kube-api-access-bznvl") pod "296a5394-adf8-4d59-9734-724d6d628e88" (UID: "296a5394-adf8-4d59-9734-724d6d628e88"). InnerVolumeSpecName "kube-api-access-bznvl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.101406 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51689a15-199a-4ffd-96ad-927b044f222b-kube-api-access-dbvmb" (OuterVolumeSpecName: "kube-api-access-dbvmb") pod "51689a15-199a-4ffd-96ad-927b044f222b" (UID: "51689a15-199a-4ffd-96ad-927b044f222b"). InnerVolumeSpecName "kube-api-access-dbvmb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.101404 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51689a15-199a-4ffd-96ad-927b044f222b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "51689a15-199a-4ffd-96ad-927b044f222b" (UID: "51689a15-199a-4ffd-96ad-927b044f222b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.138401 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8304b75b-b8b5-40c2-a63a-7ce2e5e2db54-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8304b75b-b8b5-40c2-a63a-7ce2e5e2db54" (UID: "8304b75b-b8b5-40c2-a63a-7ce2e5e2db54"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.191664 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/296a5394-adf8-4d59-9734-724d6d628e88-client-ca\") pod \"296a5394-adf8-4d59-9734-724d6d628e88\" (UID: \"296a5394-adf8-4d59-9734-724d6d628e88\") " Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.191826 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/296a5394-adf8-4d59-9734-724d6d628e88-serving-cert\") pod \"296a5394-adf8-4d59-9734-724d6d628e88\" (UID: \"296a5394-adf8-4d59-9734-724d6d628e88\") " Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.192063 5017 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/296a5394-adf8-4d59-9734-724d6d628e88-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.192085 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbvmb\" (UniqueName: \"kubernetes.io/projected/51689a15-199a-4ffd-96ad-927b044f222b-kube-api-access-dbvmb\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.192097 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nsx6c\" (UniqueName: \"kubernetes.io/projected/8304b75b-b8b5-40c2-a63a-7ce2e5e2db54-kube-api-access-nsx6c\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.192106 5017 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51689a15-199a-4ffd-96ad-927b044f222b-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.192129 5017 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/51689a15-199a-4ffd-96ad-927b044f222b-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.192137 5017 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" 
(UniqueName: \"kubernetes.io/configmap/51689a15-199a-4ffd-96ad-927b044f222b-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.192145 5017 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8304b75b-b8b5-40c2-a63a-7ce2e5e2db54-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.192153 5017 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/51689a15-199a-4ffd-96ad-927b044f222b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.192161 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bznvl\" (UniqueName: \"kubernetes.io/projected/296a5394-adf8-4d59-9734-724d6d628e88-kube-api-access-bznvl\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.192169 5017 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8304b75b-b8b5-40c2-a63a-7ce2e5e2db54-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.192241 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/296a5394-adf8-4d59-9734-724d6d628e88-client-ca" (OuterVolumeSpecName: "client-ca") pod "296a5394-adf8-4d59-9734-724d6d628e88" (UID: "296a5394-adf8-4d59-9734-724d6d628e88"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.194761 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/296a5394-adf8-4d59-9734-724d6d628e88-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "296a5394-adf8-4d59-9734-724d6d628e88" (UID: "296a5394-adf8-4d59-9734-724d6d628e88"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.293146 5017 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/296a5394-adf8-4d59-9734-724d6d628e88-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.293181 5017 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/296a5394-adf8-4d59-9734-724d6d628e88-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.443867 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx7qv" event={"ID":"8304b75b-b8b5-40c2-a63a-7ce2e5e2db54","Type":"ContainerDied","Data":"cd45fa2ee3cda7cf39769347be5695b879afe6c627db325a3f290bf2ecbe39d2"} Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.443961 5017 scope.go:117] "RemoveContainer" containerID="f73ae92f68ca1750068274b0689e3bdae8de16672af8771b203323a1ce172f40" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.443885 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rx7qv" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.445210 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" event={"ID":"51689a15-199a-4ffd-96ad-927b044f222b","Type":"ContainerDied","Data":"ec78b87f66d7098cd097355239da0f514ac8cf85b48986fb0e0a724d089cc281"} Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.445226 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-557c58b94b-g9tvg" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.448473 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" event={"ID":"296a5394-adf8-4d59-9734-724d6d628e88","Type":"ContainerDied","Data":"db579b7148fb8e4ef02f1a61d9af6917487983ac303a23f615733e304352370d"} Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.448718 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.462457 5017 scope.go:117] "RemoveContainer" containerID="c3f4b4e44949092f95cc9fda6c45132e1ce8aa2d3a5a5bea62d13da744e2d2cd" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.498979 5017 scope.go:117] "RemoveContainer" containerID="dc48753f0e25991475439f7bd577cb82021cf6964dae56f44258147982ba63b2" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.526628 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rx7qv"] Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.531148 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rx7qv"] Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.536282 5017 scope.go:117] "RemoveContainer" containerID="b1d746b33989ffb38841b346242bb1d457777dad04a25165c1d4eb987d25ed3c" Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.536491 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-557c58b94b-g9tvg"] Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.539538 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-557c58b94b-g9tvg"] Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.549206 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr"] Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.551842 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-fcdcfd659-prcdr"] Jan 22 14:06:48 crc kubenswrapper[5017]: I0122 14:06:48.555919 5017 scope.go:117] "RemoveContainer" containerID="198ac5ed5de09317b32d76467e18a57953d8cd6b1536771ee6e70c28a25566fa" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.266682 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="296a5394-adf8-4d59-9734-724d6d628e88" path="/var/lib/kubelet/pods/296a5394-adf8-4d59-9734-724d6d628e88/volumes" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.267432 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51689a15-199a-4ffd-96ad-927b044f222b" path="/var/lib/kubelet/pods/51689a15-199a-4ffd-96ad-927b044f222b/volumes" 
Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.267926 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b6afb3f-3d0e-4c54-8fc9-c112742976cc" path="/var/lib/kubelet/pods/5b6afb3f-3d0e-4c54-8fc9-c112742976cc/volumes" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.268993 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8304b75b-b8b5-40c2-a63a-7ce2e5e2db54" path="/var/lib/kubelet/pods/8304b75b-b8b5-40c2-a63a-7ce2e5e2db54/volumes" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.671206 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-dd85d8486-npdpb"] Jan 22 14:06:49 crc kubenswrapper[5017]: E0122 14:06:49.671843 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51689a15-199a-4ffd-96ad-927b044f222b" containerName="controller-manager" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.671861 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="51689a15-199a-4ffd-96ad-927b044f222b" containerName="controller-manager" Jan 22 14:06:49 crc kubenswrapper[5017]: E0122 14:06:49.671879 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b6afb3f-3d0e-4c54-8fc9-c112742976cc" containerName="registry-server" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.671889 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b6afb3f-3d0e-4c54-8fc9-c112742976cc" containerName="registry-server" Jan 22 14:06:49 crc kubenswrapper[5017]: E0122 14:06:49.671900 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b6afb3f-3d0e-4c54-8fc9-c112742976cc" containerName="extract-content" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.671907 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b6afb3f-3d0e-4c54-8fc9-c112742976cc" containerName="extract-content" Jan 22 14:06:49 crc kubenswrapper[5017]: E0122 14:06:49.671923 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65614518-ced0-4d7b-86f0-2ae04bbcb58a" containerName="extract-utilities" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.671930 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="65614518-ced0-4d7b-86f0-2ae04bbcb58a" containerName="extract-utilities" Jan 22 14:06:49 crc kubenswrapper[5017]: E0122 14:06:49.671940 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65614518-ced0-4d7b-86f0-2ae04bbcb58a" containerName="registry-server" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.671948 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="65614518-ced0-4d7b-86f0-2ae04bbcb58a" containerName="registry-server" Jan 22 14:06:49 crc kubenswrapper[5017]: E0122 14:06:49.671959 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8304b75b-b8b5-40c2-a63a-7ce2e5e2db54" containerName="extract-utilities" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.671965 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="8304b75b-b8b5-40c2-a63a-7ce2e5e2db54" containerName="extract-utilities" Jan 22 14:06:49 crc kubenswrapper[5017]: E0122 14:06:49.671976 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65614518-ced0-4d7b-86f0-2ae04bbcb58a" containerName="extract-content" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.671984 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="65614518-ced0-4d7b-86f0-2ae04bbcb58a" containerName="extract-content" Jan 22 14:06:49 crc kubenswrapper[5017]: E0122 14:06:49.671994 
5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b6afb3f-3d0e-4c54-8fc9-c112742976cc" containerName="extract-utilities" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.672002 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b6afb3f-3d0e-4c54-8fc9-c112742976cc" containerName="extract-utilities" Jan 22 14:06:49 crc kubenswrapper[5017]: E0122 14:06:49.672013 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8304b75b-b8b5-40c2-a63a-7ce2e5e2db54" containerName="extract-content" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.672020 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="8304b75b-b8b5-40c2-a63a-7ce2e5e2db54" containerName="extract-content" Jan 22 14:06:49 crc kubenswrapper[5017]: E0122 14:06:49.672033 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8304b75b-b8b5-40c2-a63a-7ce2e5e2db54" containerName="registry-server" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.672042 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="8304b75b-b8b5-40c2-a63a-7ce2e5e2db54" containerName="registry-server" Jan 22 14:06:49 crc kubenswrapper[5017]: E0122 14:06:49.672053 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="296a5394-adf8-4d59-9734-724d6d628e88" containerName="route-controller-manager" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.672060 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="296a5394-adf8-4d59-9734-724d6d628e88" containerName="route-controller-manager" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.672195 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="65614518-ced0-4d7b-86f0-2ae04bbcb58a" containerName="registry-server" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.672210 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b6afb3f-3d0e-4c54-8fc9-c112742976cc" containerName="registry-server" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.672223 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="51689a15-199a-4ffd-96ad-927b044f222b" containerName="controller-manager" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.672232 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="8304b75b-b8b5-40c2-a63a-7ce2e5e2db54" containerName="registry-server" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.672245 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="296a5394-adf8-4d59-9734-724d6d628e88" containerName="route-controller-manager" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.672916 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-dd85d8486-npdpb" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.673764 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-685b46bcf8-bqg2s"] Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.674467 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-685b46bcf8-bqg2s" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.674921 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.677799 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.677992 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.678325 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.678567 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.679386 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.679762 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.681465 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.681673 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.681808 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.682249 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.682590 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.685415 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.687449 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-685b46bcf8-bqg2s"] Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.691039 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-dd85d8486-npdpb"] Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.710254 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3a57c321-c070-4311-9add-8dbc2cdee110-client-ca\") pod \"route-controller-manager-dd85d8486-npdpb\" (UID: \"3a57c321-c070-4311-9add-8dbc2cdee110\") " pod="openshift-route-controller-manager/route-controller-manager-dd85d8486-npdpb" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.710338 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" 
(UniqueName: \"kubernetes.io/configmap/e52de443-a36f-41bf-b7b3-2f7e4438b83a-client-ca\") pod \"controller-manager-685b46bcf8-bqg2s\" (UID: \"e52de443-a36f-41bf-b7b3-2f7e4438b83a\") " pod="openshift-controller-manager/controller-manager-685b46bcf8-bqg2s" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.710367 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a57c321-c070-4311-9add-8dbc2cdee110-config\") pod \"route-controller-manager-dd85d8486-npdpb\" (UID: \"3a57c321-c070-4311-9add-8dbc2cdee110\") " pod="openshift-route-controller-manager/route-controller-manager-dd85d8486-npdpb" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.710383 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a57c321-c070-4311-9add-8dbc2cdee110-serving-cert\") pod \"route-controller-manager-dd85d8486-npdpb\" (UID: \"3a57c321-c070-4311-9add-8dbc2cdee110\") " pod="openshift-route-controller-manager/route-controller-manager-dd85d8486-npdpb" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.710408 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e52de443-a36f-41bf-b7b3-2f7e4438b83a-config\") pod \"controller-manager-685b46bcf8-bqg2s\" (UID: \"e52de443-a36f-41bf-b7b3-2f7e4438b83a\") " pod="openshift-controller-manager/controller-manager-685b46bcf8-bqg2s" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.710422 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e52de443-a36f-41bf-b7b3-2f7e4438b83a-serving-cert\") pod \"controller-manager-685b46bcf8-bqg2s\" (UID: \"e52de443-a36f-41bf-b7b3-2f7e4438b83a\") " pod="openshift-controller-manager/controller-manager-685b46bcf8-bqg2s" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.710534 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wlwg\" (UniqueName: \"kubernetes.io/projected/3a57c321-c070-4311-9add-8dbc2cdee110-kube-api-access-8wlwg\") pod \"route-controller-manager-dd85d8486-npdpb\" (UID: \"3a57c321-c070-4311-9add-8dbc2cdee110\") " pod="openshift-route-controller-manager/route-controller-manager-dd85d8486-npdpb" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.710594 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8dkr\" (UniqueName: \"kubernetes.io/projected/e52de443-a36f-41bf-b7b3-2f7e4438b83a-kube-api-access-d8dkr\") pod \"controller-manager-685b46bcf8-bqg2s\" (UID: \"e52de443-a36f-41bf-b7b3-2f7e4438b83a\") " pod="openshift-controller-manager/controller-manager-685b46bcf8-bqg2s" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.710676 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e52de443-a36f-41bf-b7b3-2f7e4438b83a-proxy-ca-bundles\") pod \"controller-manager-685b46bcf8-bqg2s\" (UID: \"e52de443-a36f-41bf-b7b3-2f7e4438b83a\") " pod="openshift-controller-manager/controller-manager-685b46bcf8-bqg2s" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.811360 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/e52de443-a36f-41bf-b7b3-2f7e4438b83a-config\") pod \"controller-manager-685b46bcf8-bqg2s\" (UID: \"e52de443-a36f-41bf-b7b3-2f7e4438b83a\") " pod="openshift-controller-manager/controller-manager-685b46bcf8-bqg2s" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.811422 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e52de443-a36f-41bf-b7b3-2f7e4438b83a-serving-cert\") pod \"controller-manager-685b46bcf8-bqg2s\" (UID: \"e52de443-a36f-41bf-b7b3-2f7e4438b83a\") " pod="openshift-controller-manager/controller-manager-685b46bcf8-bqg2s" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.811460 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wlwg\" (UniqueName: \"kubernetes.io/projected/3a57c321-c070-4311-9add-8dbc2cdee110-kube-api-access-8wlwg\") pod \"route-controller-manager-dd85d8486-npdpb\" (UID: \"3a57c321-c070-4311-9add-8dbc2cdee110\") " pod="openshift-route-controller-manager/route-controller-manager-dd85d8486-npdpb" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.811482 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8dkr\" (UniqueName: \"kubernetes.io/projected/e52de443-a36f-41bf-b7b3-2f7e4438b83a-kube-api-access-d8dkr\") pod \"controller-manager-685b46bcf8-bqg2s\" (UID: \"e52de443-a36f-41bf-b7b3-2f7e4438b83a\") " pod="openshift-controller-manager/controller-manager-685b46bcf8-bqg2s" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.811514 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e52de443-a36f-41bf-b7b3-2f7e4438b83a-proxy-ca-bundles\") pod \"controller-manager-685b46bcf8-bqg2s\" (UID: \"e52de443-a36f-41bf-b7b3-2f7e4438b83a\") " pod="openshift-controller-manager/controller-manager-685b46bcf8-bqg2s" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.811549 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3a57c321-c070-4311-9add-8dbc2cdee110-client-ca\") pod \"route-controller-manager-dd85d8486-npdpb\" (UID: \"3a57c321-c070-4311-9add-8dbc2cdee110\") " pod="openshift-route-controller-manager/route-controller-manager-dd85d8486-npdpb" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.811583 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e52de443-a36f-41bf-b7b3-2f7e4438b83a-client-ca\") pod \"controller-manager-685b46bcf8-bqg2s\" (UID: \"e52de443-a36f-41bf-b7b3-2f7e4438b83a\") " pod="openshift-controller-manager/controller-manager-685b46bcf8-bqg2s" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.811613 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a57c321-c070-4311-9add-8dbc2cdee110-config\") pod \"route-controller-manager-dd85d8486-npdpb\" (UID: \"3a57c321-c070-4311-9add-8dbc2cdee110\") " pod="openshift-route-controller-manager/route-controller-manager-dd85d8486-npdpb" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.811634 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a57c321-c070-4311-9add-8dbc2cdee110-serving-cert\") pod \"route-controller-manager-dd85d8486-npdpb\" (UID: 
\"3a57c321-c070-4311-9add-8dbc2cdee110\") " pod="openshift-route-controller-manager/route-controller-manager-dd85d8486-npdpb" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.812695 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e52de443-a36f-41bf-b7b3-2f7e4438b83a-config\") pod \"controller-manager-685b46bcf8-bqg2s\" (UID: \"e52de443-a36f-41bf-b7b3-2f7e4438b83a\") " pod="openshift-controller-manager/controller-manager-685b46bcf8-bqg2s" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.813289 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e52de443-a36f-41bf-b7b3-2f7e4438b83a-proxy-ca-bundles\") pod \"controller-manager-685b46bcf8-bqg2s\" (UID: \"e52de443-a36f-41bf-b7b3-2f7e4438b83a\") " pod="openshift-controller-manager/controller-manager-685b46bcf8-bqg2s" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.813955 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3a57c321-c070-4311-9add-8dbc2cdee110-client-ca\") pod \"route-controller-manager-dd85d8486-npdpb\" (UID: \"3a57c321-c070-4311-9add-8dbc2cdee110\") " pod="openshift-route-controller-manager/route-controller-manager-dd85d8486-npdpb" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.814209 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a57c321-c070-4311-9add-8dbc2cdee110-config\") pod \"route-controller-manager-dd85d8486-npdpb\" (UID: \"3a57c321-c070-4311-9add-8dbc2cdee110\") " pod="openshift-route-controller-manager/route-controller-manager-dd85d8486-npdpb" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.814462 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e52de443-a36f-41bf-b7b3-2f7e4438b83a-client-ca\") pod \"controller-manager-685b46bcf8-bqg2s\" (UID: \"e52de443-a36f-41bf-b7b3-2f7e4438b83a\") " pod="openshift-controller-manager/controller-manager-685b46bcf8-bqg2s" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.819946 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a57c321-c070-4311-9add-8dbc2cdee110-serving-cert\") pod \"route-controller-manager-dd85d8486-npdpb\" (UID: \"3a57c321-c070-4311-9add-8dbc2cdee110\") " pod="openshift-route-controller-manager/route-controller-manager-dd85d8486-npdpb" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.830939 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8dkr\" (UniqueName: \"kubernetes.io/projected/e52de443-a36f-41bf-b7b3-2f7e4438b83a-kube-api-access-d8dkr\") pod \"controller-manager-685b46bcf8-bqg2s\" (UID: \"e52de443-a36f-41bf-b7b3-2f7e4438b83a\") " pod="openshift-controller-manager/controller-manager-685b46bcf8-bqg2s" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.831893 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e52de443-a36f-41bf-b7b3-2f7e4438b83a-serving-cert\") pod \"controller-manager-685b46bcf8-bqg2s\" (UID: \"e52de443-a36f-41bf-b7b3-2f7e4438b83a\") " pod="openshift-controller-manager/controller-manager-685b46bcf8-bqg2s" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.832079 5017 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-8wlwg\" (UniqueName: \"kubernetes.io/projected/3a57c321-c070-4311-9add-8dbc2cdee110-kube-api-access-8wlwg\") pod \"route-controller-manager-dd85d8486-npdpb\" (UID: \"3a57c321-c070-4311-9add-8dbc2cdee110\") " pod="openshift-route-controller-manager/route-controller-manager-dd85d8486-npdpb" Jan 22 14:06:49 crc kubenswrapper[5017]: I0122 14:06:49.996987 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-dd85d8486-npdpb" Jan 22 14:06:50 crc kubenswrapper[5017]: I0122 14:06:50.006873 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-685b46bcf8-bqg2s" Jan 22 14:06:50 crc kubenswrapper[5017]: I0122 14:06:50.176961 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-v9nd6" Jan 22 14:06:50 crc kubenswrapper[5017]: I0122 14:06:50.177105 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-v9nd6" Jan 22 14:06:50 crc kubenswrapper[5017]: I0122 14:06:50.221254 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-v9nd6" Jan 22 14:06:50 crc kubenswrapper[5017]: I0122 14:06:50.245826 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-685b46bcf8-bqg2s"] Jan 22 14:06:50 crc kubenswrapper[5017]: W0122 14:06:50.250575 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode52de443_a36f_41bf_b7b3_2f7e4438b83a.slice/crio-572c5ba707c4af2bb79d64e13f296024023289a9e5e3211e99ef5c1a3d7c54d7 WatchSource:0}: Error finding container 572c5ba707c4af2bb79d64e13f296024023289a9e5e3211e99ef5c1a3d7c54d7: Status 404 returned error can't find the container with id 572c5ba707c4af2bb79d64e13f296024023289a9e5e3211e99ef5c1a3d7c54d7 Jan 22 14:06:50 crc kubenswrapper[5017]: I0122 14:06:50.396258 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-dd85d8486-npdpb"] Jan 22 14:06:50 crc kubenswrapper[5017]: W0122 14:06:50.408004 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3a57c321_c070_4311_9add_8dbc2cdee110.slice/crio-f2a19122b8f0ba6cecc2ce113bc0d3090347e0f3787423e42ed1082dd06ebae3 WatchSource:0}: Error finding container f2a19122b8f0ba6cecc2ce113bc0d3090347e0f3787423e42ed1082dd06ebae3: Status 404 returned error can't find the container with id f2a19122b8f0ba6cecc2ce113bc0d3090347e0f3787423e42ed1082dd06ebae3 Jan 22 14:06:50 crc kubenswrapper[5017]: I0122 14:06:50.464986 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-dd85d8486-npdpb" event={"ID":"3a57c321-c070-4311-9add-8dbc2cdee110","Type":"ContainerStarted","Data":"f2a19122b8f0ba6cecc2ce113bc0d3090347e0f3787423e42ed1082dd06ebae3"} Jan 22 14:06:50 crc kubenswrapper[5017]: I0122 14:06:50.467409 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-685b46bcf8-bqg2s" event={"ID":"e52de443-a36f-41bf-b7b3-2f7e4438b83a","Type":"ContainerStarted","Data":"cb2ddbac1cda04ffba45dca4bddfada8ed621be989143d5c6a903a9b74c75995"} Jan 22 14:06:50 crc kubenswrapper[5017]: I0122 14:06:50.467441 
5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-685b46bcf8-bqg2s" event={"ID":"e52de443-a36f-41bf-b7b3-2f7e4438b83a","Type":"ContainerStarted","Data":"572c5ba707c4af2bb79d64e13f296024023289a9e5e3211e99ef5c1a3d7c54d7"} Jan 22 14:06:50 crc kubenswrapper[5017]: I0122 14:06:50.489262 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-685b46bcf8-bqg2s" podStartSLOduration=4.489237897 podStartE2EDuration="4.489237897s" podCreationTimestamp="2026-01-22 14:06:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:06:50.485931528 +0000 UTC m=+225.478393396" watchObservedRunningTime="2026-01-22 14:06:50.489237897 +0000 UTC m=+225.481699755" Jan 22 14:06:50 crc kubenswrapper[5017]: I0122 14:06:50.524617 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-v9nd6" Jan 22 14:06:51 crc kubenswrapper[5017]: I0122 14:06:51.477659 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-dd85d8486-npdpb" event={"ID":"3a57c321-c070-4311-9add-8dbc2cdee110","Type":"ContainerStarted","Data":"f84e219277a15a1ed5ed4e43b58ad03a5abae34c6fc635755acc64f89f077cdd"} Jan 22 14:06:51 crc kubenswrapper[5017]: I0122 14:06:51.478346 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-dd85d8486-npdpb" Jan 22 14:06:51 crc kubenswrapper[5017]: I0122 14:06:51.478385 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-685b46bcf8-bqg2s" Jan 22 14:06:51 crc kubenswrapper[5017]: I0122 14:06:51.483518 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-685b46bcf8-bqg2s" Jan 22 14:06:51 crc kubenswrapper[5017]: I0122 14:06:51.485580 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-dd85d8486-npdpb" Jan 22 14:06:51 crc kubenswrapper[5017]: I0122 14:06:51.497105 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-dd85d8486-npdpb" podStartSLOduration=5.497082623 podStartE2EDuration="5.497082623s" podCreationTimestamp="2026-01-22 14:06:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:06:51.494612639 +0000 UTC m=+226.487074507" watchObservedRunningTime="2026-01-22 14:06:51.497082623 +0000 UTC m=+226.489544481" Jan 22 14:06:52 crc kubenswrapper[5017]: I0122 14:06:52.883659 5017 patch_prober.go:28] interesting pod/machine-config-daemon-cr62h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:06:52 crc kubenswrapper[5017]: I0122 14:06:52.883738 5017 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:06:52 crc kubenswrapper[5017]: I0122 14:06:52.883790 5017 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" Jan 22 14:06:52 crc kubenswrapper[5017]: I0122 14:06:52.884752 5017 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5"} pod="openshift-machine-config-operator/machine-config-daemon-cr62h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 14:06:52 crc kubenswrapper[5017]: I0122 14:06:52.884827 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" containerID="cri-o://dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5" gracePeriod=600 Jan 22 14:06:53 crc kubenswrapper[5017]: I0122 14:06:53.328845 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xhq9v" Jan 22 14:06:53 crc kubenswrapper[5017]: I0122 14:06:53.329343 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xhq9v" Jan 22 14:06:53 crc kubenswrapper[5017]: I0122 14:06:53.373127 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xhq9v" Jan 22 14:06:53 crc kubenswrapper[5017]: I0122 14:06:53.391178 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7pt94"] Jan 22 14:06:53 crc kubenswrapper[5017]: I0122 14:06:53.490524 5017 generic.go:334] "Generic (PLEG): container finished" podID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerID="dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5" exitCode=0 Jan 22 14:06:53 crc kubenswrapper[5017]: I0122 14:06:53.490605 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" event={"ID":"3f43f877-4d1f-4041-af1f-39a962f7ac0d","Type":"ContainerDied","Data":"dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5"} Jan 22 14:06:53 crc kubenswrapper[5017]: I0122 14:06:53.491195 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" event={"ID":"3f43f877-4d1f-4041-af1f-39a962f7ac0d","Type":"ContainerStarted","Data":"558dea29af1da1143db23fb238318c80a09e247580c9fbabc7f06ab287879a19"} Jan 22 14:06:53 crc kubenswrapper[5017]: I0122 14:06:53.536658 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xhq9v" Jan 22 14:06:53 crc kubenswrapper[5017]: I0122 14:06:53.727551 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9f9c6" Jan 22 14:06:53 crc kubenswrapper[5017]: I0122 14:06:53.765841 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9f9c6" Jan 22 14:06:54 crc kubenswrapper[5017]: I0122 14:06:54.509289 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9f9c6"] Jan 22 14:06:55 crc 
kubenswrapper[5017]: I0122 14:06:55.514958 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9f9c6" podUID="bffa30e4-e5f8-4888-9731-dfd3e4b74cd1" containerName="registry-server" containerID="cri-o://8e01a40c373977d03ee296b0082e06c8219261098287df1b4315938b24e1834d" gracePeriod=2 Jan 22 14:06:55 crc kubenswrapper[5017]: I0122 14:06:55.999137 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9f9c6" Jan 22 14:06:56 crc kubenswrapper[5017]: I0122 14:06:56.194464 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bffa30e4-e5f8-4888-9731-dfd3e4b74cd1-catalog-content\") pod \"bffa30e4-e5f8-4888-9731-dfd3e4b74cd1\" (UID: \"bffa30e4-e5f8-4888-9731-dfd3e4b74cd1\") " Jan 22 14:06:56 crc kubenswrapper[5017]: I0122 14:06:56.194560 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fffbl\" (UniqueName: \"kubernetes.io/projected/bffa30e4-e5f8-4888-9731-dfd3e4b74cd1-kube-api-access-fffbl\") pod \"bffa30e4-e5f8-4888-9731-dfd3e4b74cd1\" (UID: \"bffa30e4-e5f8-4888-9731-dfd3e4b74cd1\") " Jan 22 14:06:56 crc kubenswrapper[5017]: I0122 14:06:56.194585 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bffa30e4-e5f8-4888-9731-dfd3e4b74cd1-utilities\") pod \"bffa30e4-e5f8-4888-9731-dfd3e4b74cd1\" (UID: \"bffa30e4-e5f8-4888-9731-dfd3e4b74cd1\") " Jan 22 14:06:56 crc kubenswrapper[5017]: I0122 14:06:56.195815 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bffa30e4-e5f8-4888-9731-dfd3e4b74cd1-utilities" (OuterVolumeSpecName: "utilities") pod "bffa30e4-e5f8-4888-9731-dfd3e4b74cd1" (UID: "bffa30e4-e5f8-4888-9731-dfd3e4b74cd1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:06:56 crc kubenswrapper[5017]: I0122 14:06:56.201787 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bffa30e4-e5f8-4888-9731-dfd3e4b74cd1-kube-api-access-fffbl" (OuterVolumeSpecName: "kube-api-access-fffbl") pod "bffa30e4-e5f8-4888-9731-dfd3e4b74cd1" (UID: "bffa30e4-e5f8-4888-9731-dfd3e4b74cd1"). InnerVolumeSpecName "kube-api-access-fffbl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:06:56 crc kubenswrapper[5017]: I0122 14:06:56.295821 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fffbl\" (UniqueName: \"kubernetes.io/projected/bffa30e4-e5f8-4888-9731-dfd3e4b74cd1-kube-api-access-fffbl\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:56 crc kubenswrapper[5017]: I0122 14:06:56.295858 5017 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bffa30e4-e5f8-4888-9731-dfd3e4b74cd1-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:56 crc kubenswrapper[5017]: I0122 14:06:56.333569 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bffa30e4-e5f8-4888-9731-dfd3e4b74cd1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bffa30e4-e5f8-4888-9731-dfd3e4b74cd1" (UID: "bffa30e4-e5f8-4888-9731-dfd3e4b74cd1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:06:56 crc kubenswrapper[5017]: I0122 14:06:56.397773 5017 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bffa30e4-e5f8-4888-9731-dfd3e4b74cd1-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:56 crc kubenswrapper[5017]: I0122 14:06:56.524876 5017 generic.go:334] "Generic (PLEG): container finished" podID="bffa30e4-e5f8-4888-9731-dfd3e4b74cd1" containerID="8e01a40c373977d03ee296b0082e06c8219261098287df1b4315938b24e1834d" exitCode=0 Jan 22 14:06:56 crc kubenswrapper[5017]: I0122 14:06:56.524920 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9f9c6" event={"ID":"bffa30e4-e5f8-4888-9731-dfd3e4b74cd1","Type":"ContainerDied","Data":"8e01a40c373977d03ee296b0082e06c8219261098287df1b4315938b24e1834d"} Jan 22 14:06:56 crc kubenswrapper[5017]: I0122 14:06:56.524952 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9f9c6" event={"ID":"bffa30e4-e5f8-4888-9731-dfd3e4b74cd1","Type":"ContainerDied","Data":"9c15ff7482028defb8dada9f9b93d62eb4b40dd14d2120d1af13e70c8516e3a6"} Jan 22 14:06:56 crc kubenswrapper[5017]: I0122 14:06:56.524981 5017 scope.go:117] "RemoveContainer" containerID="8e01a40c373977d03ee296b0082e06c8219261098287df1b4315938b24e1834d" Jan 22 14:06:56 crc kubenswrapper[5017]: I0122 14:06:56.524997 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9f9c6" Jan 22 14:06:56 crc kubenswrapper[5017]: I0122 14:06:56.542470 5017 scope.go:117] "RemoveContainer" containerID="6e6890e671041b4e43f3fe9b6e3b43d3eefb91f0f566813f15c993747a6c813d" Jan 22 14:06:56 crc kubenswrapper[5017]: I0122 14:06:56.556626 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9f9c6"] Jan 22 14:06:56 crc kubenswrapper[5017]: I0122 14:06:56.561453 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9f9c6"] Jan 22 14:06:56 crc kubenswrapper[5017]: I0122 14:06:56.580907 5017 scope.go:117] "RemoveContainer" containerID="457bfc865c18b229c3feab03a5ae9d2190854f6a964b30f158e18a8a4d19a18f" Jan 22 14:06:56 crc kubenswrapper[5017]: I0122 14:06:56.594270 5017 scope.go:117] "RemoveContainer" containerID="8e01a40c373977d03ee296b0082e06c8219261098287df1b4315938b24e1834d" Jan 22 14:06:56 crc kubenswrapper[5017]: E0122 14:06:56.594658 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e01a40c373977d03ee296b0082e06c8219261098287df1b4315938b24e1834d\": container with ID starting with 8e01a40c373977d03ee296b0082e06c8219261098287df1b4315938b24e1834d not found: ID does not exist" containerID="8e01a40c373977d03ee296b0082e06c8219261098287df1b4315938b24e1834d" Jan 22 14:06:56 crc kubenswrapper[5017]: I0122 14:06:56.594692 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e01a40c373977d03ee296b0082e06c8219261098287df1b4315938b24e1834d"} err="failed to get container status \"8e01a40c373977d03ee296b0082e06c8219261098287df1b4315938b24e1834d\": rpc error: code = NotFound desc = could not find container \"8e01a40c373977d03ee296b0082e06c8219261098287df1b4315938b24e1834d\": container with ID starting with 8e01a40c373977d03ee296b0082e06c8219261098287df1b4315938b24e1834d not found: ID does not exist" Jan 22 14:06:56 crc 
kubenswrapper[5017]: I0122 14:06:56.594712 5017 scope.go:117] "RemoveContainer" containerID="6e6890e671041b4e43f3fe9b6e3b43d3eefb91f0f566813f15c993747a6c813d" Jan 22 14:06:56 crc kubenswrapper[5017]: E0122 14:06:56.594957 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e6890e671041b4e43f3fe9b6e3b43d3eefb91f0f566813f15c993747a6c813d\": container with ID starting with 6e6890e671041b4e43f3fe9b6e3b43d3eefb91f0f566813f15c993747a6c813d not found: ID does not exist" containerID="6e6890e671041b4e43f3fe9b6e3b43d3eefb91f0f566813f15c993747a6c813d" Jan 22 14:06:56 crc kubenswrapper[5017]: I0122 14:06:56.594980 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e6890e671041b4e43f3fe9b6e3b43d3eefb91f0f566813f15c993747a6c813d"} err="failed to get container status \"6e6890e671041b4e43f3fe9b6e3b43d3eefb91f0f566813f15c993747a6c813d\": rpc error: code = NotFound desc = could not find container \"6e6890e671041b4e43f3fe9b6e3b43d3eefb91f0f566813f15c993747a6c813d\": container with ID starting with 6e6890e671041b4e43f3fe9b6e3b43d3eefb91f0f566813f15c993747a6c813d not found: ID does not exist" Jan 22 14:06:56 crc kubenswrapper[5017]: I0122 14:06:56.594992 5017 scope.go:117] "RemoveContainer" containerID="457bfc865c18b229c3feab03a5ae9d2190854f6a964b30f158e18a8a4d19a18f" Jan 22 14:06:56 crc kubenswrapper[5017]: E0122 14:06:56.595207 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"457bfc865c18b229c3feab03a5ae9d2190854f6a964b30f158e18a8a4d19a18f\": container with ID starting with 457bfc865c18b229c3feab03a5ae9d2190854f6a964b30f158e18a8a4d19a18f not found: ID does not exist" containerID="457bfc865c18b229c3feab03a5ae9d2190854f6a964b30f158e18a8a4d19a18f" Jan 22 14:06:56 crc kubenswrapper[5017]: I0122 14:06:56.595228 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"457bfc865c18b229c3feab03a5ae9d2190854f6a964b30f158e18a8a4d19a18f"} err="failed to get container status \"457bfc865c18b229c3feab03a5ae9d2190854f6a964b30f158e18a8a4d19a18f\": rpc error: code = NotFound desc = could not find container \"457bfc865c18b229c3feab03a5ae9d2190854f6a964b30f158e18a8a4d19a18f\": container with ID starting with 457bfc865c18b229c3feab03a5ae9d2190854f6a964b30f158e18a8a4d19a18f not found: ID does not exist" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.266263 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bffa30e4-e5f8-4888-9731-dfd3e4b74cd1" path="/var/lib/kubelet/pods/bffa30e4-e5f8-4888-9731-dfd3e4b74cd1/volumes" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.874024 5017 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 22 14:06:57 crc kubenswrapper[5017]: E0122 14:06:57.874660 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bffa30e4-e5f8-4888-9731-dfd3e4b74cd1" containerName="extract-content" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.874676 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="bffa30e4-e5f8-4888-9731-dfd3e4b74cd1" containerName="extract-content" Jan 22 14:06:57 crc kubenswrapper[5017]: E0122 14:06:57.874686 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bffa30e4-e5f8-4888-9731-dfd3e4b74cd1" containerName="extract-utilities" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.874692 
5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="bffa30e4-e5f8-4888-9731-dfd3e4b74cd1" containerName="extract-utilities" Jan 22 14:06:57 crc kubenswrapper[5017]: E0122 14:06:57.874702 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bffa30e4-e5f8-4888-9731-dfd3e4b74cd1" containerName="registry-server" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.874708 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="bffa30e4-e5f8-4888-9731-dfd3e4b74cd1" containerName="registry-server" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.874801 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="bffa30e4-e5f8-4888-9731-dfd3e4b74cd1" containerName="registry-server" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.875097 5017 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.875434 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570" gracePeriod=15 Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.875460 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673" gracePeriod=15 Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.875492 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb" gracePeriod=15 Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.875492 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695" gracePeriod=15 Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.875395 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c" gracePeriod=15 Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.875436 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.877941 5017 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 22 14:06:57 crc kubenswrapper[5017]: E0122 14:06:57.878888 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.878982 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 22 14:06:57 crc kubenswrapper[5017]: E0122 14:06:57.879005 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.879011 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 22 14:06:57 crc kubenswrapper[5017]: E0122 14:06:57.879018 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.879028 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 22 14:06:57 crc kubenswrapper[5017]: E0122 14:06:57.879039 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.879045 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 22 14:06:57 crc kubenswrapper[5017]: E0122 14:06:57.879058 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.879178 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 22 14:06:57 crc kubenswrapper[5017]: E0122 14:06:57.879190 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.879196 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.880407 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.880428 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.880440 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.880448 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-cert-regeneration-controller" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.880475 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.918078 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.918147 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.918321 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.918469 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.918504 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.918529 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.918579 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:06:57 crc kubenswrapper[5017]: I0122 14:06:57.918619 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:06:57 crc 
kubenswrapper[5017]: E0122 14:06:57.984139 5017 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.129.56.235:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.018996 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.019096 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.019163 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.019230 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.019275 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.019314 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.019336 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.019460 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.019488 5017 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.019508 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.019566 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.019377 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.019605 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.019401 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.019641 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.019425 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.301221 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:06:58 crc kubenswrapper[5017]: W0122 14:06:58.322436 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-e7575c226cf612ca219473cf8470b2a948b8800c40003a3029fdfd805d695b57 WatchSource:0}: Error finding container e7575c226cf612ca219473cf8470b2a948b8800c40003a3029fdfd805d695b57: Status 404 returned error can't find the container with id e7575c226cf612ca219473cf8470b2a948b8800c40003a3029fdfd805d695b57 Jan 22 14:06:58 crc kubenswrapper[5017]: E0122 14:06:58.326654 5017 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.129.56.235:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188d12b9e9b973a1 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-22 14:06:58.325967777 +0000 UTC m=+233.318429635,LastTimestamp:2026-01-22 14:06:58.325967777 +0000 UTC m=+233.318429635,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.547313 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"e7575c226cf612ca219473cf8470b2a948b8800c40003a3029fdfd805d695b57"} Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.549197 5017 generic.go:334] "Generic (PLEG): container finished" podID="775ea46b-9a5d-41f7-8940-0cd040b95f58" containerID="56c6c60d7fc852eb220843497ca03fad382f7be4dc897f680b2d591a2d4ca37e" exitCode=0 Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.549265 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"775ea46b-9a5d-41f7-8940-0cd040b95f58","Type":"ContainerDied","Data":"56c6c60d7fc852eb220843497ca03fad382f7be4dc897f680b2d591a2d4ca37e"} Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.550413 5017 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.550657 5017 status_manager.go:851] "Failed to get status for pod" podUID="775ea46b-9a5d-41f7-8940-0cd040b95f58" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:06:58 crc kubenswrapper[5017]: 
I0122 14:06:58.553265 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.554014 5017 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb" exitCode=0 Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.554079 5017 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673" exitCode=0 Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.554091 5017 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570" exitCode=0 Jan 22 14:06:58 crc kubenswrapper[5017]: I0122 14:06:58.554099 5017 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695" exitCode=2 Jan 22 14:06:59 crc kubenswrapper[5017]: I0122 14:06:59.560404 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"567991fb70d2010490e7a154cec40e85f739b506c7eaaa790459201931f99fb7"} Jan 22 14:06:59 crc kubenswrapper[5017]: E0122 14:06:59.561216 5017 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.129.56.235:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:06:59 crc kubenswrapper[5017]: I0122 14:06:59.561336 5017 status_manager.go:851] "Failed to get status for pod" podUID="775ea46b-9a5d-41f7-8940-0cd040b95f58" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:06:59 crc kubenswrapper[5017]: I0122 14:06:59.885400 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:06:59 crc kubenswrapper[5017]: I0122 14:06:59.885999 5017 status_manager.go:851] "Failed to get status for pod" podUID="775ea46b-9a5d-41f7-8940-0cd040b95f58" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.045649 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/775ea46b-9a5d-41f7-8940-0cd040b95f58-kube-api-access\") pod \"775ea46b-9a5d-41f7-8940-0cd040b95f58\" (UID: \"775ea46b-9a5d-41f7-8940-0cd040b95f58\") " Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.046240 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/775ea46b-9a5d-41f7-8940-0cd040b95f58-var-lock\") pod \"775ea46b-9a5d-41f7-8940-0cd040b95f58\" (UID: \"775ea46b-9a5d-41f7-8940-0cd040b95f58\") " Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.046322 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/775ea46b-9a5d-41f7-8940-0cd040b95f58-kubelet-dir\") pod \"775ea46b-9a5d-41f7-8940-0cd040b95f58\" (UID: \"775ea46b-9a5d-41f7-8940-0cd040b95f58\") " Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.046581 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/775ea46b-9a5d-41f7-8940-0cd040b95f58-var-lock" (OuterVolumeSpecName: "var-lock") pod "775ea46b-9a5d-41f7-8940-0cd040b95f58" (UID: "775ea46b-9a5d-41f7-8940-0cd040b95f58"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.046801 5017 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/775ea46b-9a5d-41f7-8940-0cd040b95f58-var-lock\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.046838 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/775ea46b-9a5d-41f7-8940-0cd040b95f58-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "775ea46b-9a5d-41f7-8940-0cd040b95f58" (UID: "775ea46b-9a5d-41f7-8940-0cd040b95f58"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.062872 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/775ea46b-9a5d-41f7-8940-0cd040b95f58-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "775ea46b-9a5d-41f7-8940-0cd040b95f58" (UID: "775ea46b-9a5d-41f7-8940-0cd040b95f58"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.147547 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/775ea46b-9a5d-41f7-8940-0cd040b95f58-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.147586 5017 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/775ea46b-9a5d-41f7-8940-0cd040b95f58-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.298589 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.299538 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.300160 5017 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.300375 5017 status_manager.go:851] "Failed to get status for pod" podUID="775ea46b-9a5d-41f7-8940-0cd040b95f58" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.450606 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.450726 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.450769 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.450966 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.451043 5017 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.451336 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.451350 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.552179 5017 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.552220 5017 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.567449 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"775ea46b-9a5d-41f7-8940-0cd040b95f58","Type":"ContainerDied","Data":"0e0bddc1559ae6617a191f34eb8e3ea1fc34db1833235682b0c1babee4dd60db"} Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.567494 5017 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e0bddc1559ae6617a191f34eb8e3ea1fc34db1833235682b0c1babee4dd60db" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.567461 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.570162 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.570865 5017 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c" exitCode=0 Jan 22 14:07:00 crc kubenswrapper[5017]: E0122 14:07:00.571593 5017 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.129.56.235:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.571665 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.572012 5017 scope.go:117] "RemoveContainer" containerID="ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.582976 5017 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.583625 5017 status_manager.go:851] "Failed to get status for pod" podUID="775ea46b-9a5d-41f7-8940-0cd040b95f58" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.585004 5017 status_manager.go:851] "Failed to get status for pod" podUID="775ea46b-9a5d-41f7-8940-0cd040b95f58" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.585397 5017 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.588188 5017 scope.go:117] "RemoveContainer" containerID="e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.601830 5017 scope.go:117] "RemoveContainer" containerID="044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.615734 5017 scope.go:117] "RemoveContainer" containerID="5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.627460 5017 scope.go:117] "RemoveContainer" containerID="f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.639551 5017 scope.go:117] "RemoveContainer" containerID="9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.658947 5017 scope.go:117] "RemoveContainer" containerID="ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb" Jan 22 14:07:00 crc kubenswrapper[5017]: E0122 14:07:00.659464 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\": container with ID starting with ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb not found: ID does not exist" containerID="ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.659526 5017 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb"} err="failed to get container status \"ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\": rpc error: code = NotFound desc = could not find container \"ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb\": container with ID starting with ee2cb0320d5937672e08c4fc7ad5a465a4358cc66c682eabd61f7072f9eafedb not found: ID does not exist" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.659560 5017 scope.go:117] "RemoveContainer" containerID="e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673" Jan 22 14:07:00 crc kubenswrapper[5017]: E0122 14:07:00.659987 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\": container with ID starting with e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673 not found: ID does not exist" containerID="e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.660064 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673"} err="failed to get container status \"e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\": rpc error: code = NotFound desc = could not find container \"e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673\": container with ID starting with e544ed5eee074cccedb51580fbe56ae2f5cd15cf11354c974fafdedaef351673 not found: ID does not exist" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.660093 5017 scope.go:117] "RemoveContainer" containerID="044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570" Jan 22 14:07:00 crc kubenswrapper[5017]: E0122 14:07:00.660456 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\": container with ID starting with 044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570 not found: ID does not exist" containerID="044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.660475 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570"} err="failed to get container status \"044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\": rpc error: code = NotFound desc = could not find container \"044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570\": container with ID starting with 044e877d3da651088cd1e512bccb73f65a5e925a99e9d9d4d92f335aed404570 not found: ID does not exist" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.660609 5017 scope.go:117] "RemoveContainer" containerID="5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695" Jan 22 14:07:00 crc kubenswrapper[5017]: E0122 14:07:00.661141 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\": container with ID starting with 5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695 not found: ID does not exist" 
containerID="5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.661175 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695"} err="failed to get container status \"5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\": rpc error: code = NotFound desc = could not find container \"5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695\": container with ID starting with 5d2cb7c9f81ac075b248e710796d41439c28cea34f22e97aedc7f23bd0106695 not found: ID does not exist" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.661220 5017 scope.go:117] "RemoveContainer" containerID="f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c" Jan 22 14:07:00 crc kubenswrapper[5017]: E0122 14:07:00.661738 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\": container with ID starting with f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c not found: ID does not exist" containerID="f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.661767 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c"} err="failed to get container status \"f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\": rpc error: code = NotFound desc = could not find container \"f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c\": container with ID starting with f7fc9cbc580fc23a6af614b19e3c9b4b5fd7ddacdc2e10d98b4357e052ca335c not found: ID does not exist" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.661783 5017 scope.go:117] "RemoveContainer" containerID="9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc" Jan 22 14:07:00 crc kubenswrapper[5017]: E0122 14:07:00.662013 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\": container with ID starting with 9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc not found: ID does not exist" containerID="9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc" Jan 22 14:07:00 crc kubenswrapper[5017]: I0122 14:07:00.662062 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc"} err="failed to get container status \"9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\": rpc error: code = NotFound desc = could not find container \"9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc\": container with ID starting with 9550d92a5933a393e7fa51d5845a3d5923af85d0caa1cb10d08916c4954f27cc not found: ID does not exist" Jan 22 14:07:00 crc kubenswrapper[5017]: E0122 14:07:00.797870 5017 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.129.56.235:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188d12b9e9b973a1 openshift-kube-apiserver 0 0001-01-01 
00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-22 14:06:58.325967777 +0000 UTC m=+233.318429635,LastTimestamp:2026-01-22 14:06:58.325967777 +0000 UTC m=+233.318429635,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 22 14:07:01 crc kubenswrapper[5017]: I0122 14:07:01.265584 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Jan 22 14:07:04 crc kubenswrapper[5017]: E0122 14:07:04.302662 5017 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.129.56.235:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" volumeName="registry-storage" Jan 22 14:07:05 crc kubenswrapper[5017]: I0122 14:07:05.260573 5017 status_manager.go:851] "Failed to get status for pod" podUID="775ea46b-9a5d-41f7-8940-0cd040b95f58" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:07:05 crc kubenswrapper[5017]: E0122 14:07:05.319763 5017 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:07:05 crc kubenswrapper[5017]: E0122 14:07:05.320394 5017 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:07:05 crc kubenswrapper[5017]: E0122 14:07:05.320698 5017 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:07:05 crc kubenswrapper[5017]: E0122 14:07:05.320929 5017 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:07:05 crc kubenswrapper[5017]: E0122 14:07:05.321261 5017 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:07:05 crc kubenswrapper[5017]: I0122 14:07:05.321327 5017 controller.go:115] "failed to update lease 
using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Jan 22 14:07:05 crc kubenswrapper[5017]: E0122 14:07:05.321693 5017 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.235:6443: connect: connection refused" interval="200ms" Jan 22 14:07:05 crc kubenswrapper[5017]: E0122 14:07:05.523152 5017 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.235:6443: connect: connection refused" interval="400ms" Jan 22 14:07:05 crc kubenswrapper[5017]: E0122 14:07:05.924422 5017 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.235:6443: connect: connection refused" interval="800ms" Jan 22 14:07:06 crc kubenswrapper[5017]: E0122 14:07:06.724859 5017 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.235:6443: connect: connection refused" interval="1.6s" Jan 22 14:07:08 crc kubenswrapper[5017]: E0122 14:07:08.325967 5017 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.235:6443: connect: connection refused" interval="3.2s" Jan 22 14:07:10 crc kubenswrapper[5017]: E0122 14:07:10.798785 5017 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.129.56.235:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188d12b9e9b973a1 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-22 14:06:58.325967777 +0000 UTC m=+233.318429635,LastTimestamp:2026-01-22 14:06:58.325967777 +0000 UTC m=+233.318429635,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 22 14:07:10 crc kubenswrapper[5017]: I0122 14:07:10.805674 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 22 14:07:10 crc kubenswrapper[5017]: I0122 14:07:10.805727 5017 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858" exitCode=1 Jan 22 14:07:10 crc kubenswrapper[5017]: I0122 14:07:10.805761 5017 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858"} Jan 22 14:07:10 crc kubenswrapper[5017]: I0122 14:07:10.806390 5017 scope.go:117] "RemoveContainer" containerID="857681b01f1b426a30dd74375cce9d499a2fada7e53a68cbd9ea11dba032a858" Jan 22 14:07:10 crc kubenswrapper[5017]: I0122 14:07:10.807309 5017 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:07:10 crc kubenswrapper[5017]: I0122 14:07:10.807536 5017 status_manager.go:851] "Failed to get status for pod" podUID="775ea46b-9a5d-41f7-8940-0cd040b95f58" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:07:11 crc kubenswrapper[5017]: I0122 14:07:11.257786 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:07:11 crc kubenswrapper[5017]: I0122 14:07:11.258863 5017 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:07:11 crc kubenswrapper[5017]: I0122 14:07:11.259467 5017 status_manager.go:851] "Failed to get status for pod" podUID="775ea46b-9a5d-41f7-8940-0cd040b95f58" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:07:11 crc kubenswrapper[5017]: I0122 14:07:11.274910 5017 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d69195ba-e14d-4a7b-bb50-d2c0bbd0283e" Jan 22 14:07:11 crc kubenswrapper[5017]: I0122 14:07:11.274940 5017 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d69195ba-e14d-4a7b-bb50-d2c0bbd0283e" Jan 22 14:07:11 crc kubenswrapper[5017]: E0122 14:07:11.275417 5017 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.235:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:07:11 crc kubenswrapper[5017]: I0122 14:07:11.275988 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:07:11 crc kubenswrapper[5017]: W0122 14:07:11.292063 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-232b305335e0e3f8d952a9f323854adea7f7ca5c40c25d90d8d6d57175646d5c WatchSource:0}: Error finding container 232b305335e0e3f8d952a9f323854adea7f7ca5c40c25d90d8d6d57175646d5c: Status 404 returned error can't find the container with id 232b305335e0e3f8d952a9f323854adea7f7ca5c40c25d90d8d6d57175646d5c Jan 22 14:07:11 crc kubenswrapper[5017]: E0122 14:07:11.527035 5017 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.235:6443: connect: connection refused" interval="6.4s" Jan 22 14:07:11 crc kubenswrapper[5017]: I0122 14:07:11.700782 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:07:11 crc kubenswrapper[5017]: I0122 14:07:11.813080 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"232b305335e0e3f8d952a9f323854adea7f7ca5c40c25d90d8d6d57175646d5c"} Jan 22 14:07:11 crc kubenswrapper[5017]: I0122 14:07:11.817862 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 22 14:07:11 crc kubenswrapper[5017]: I0122 14:07:11.817982 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ab4aa664cb6d47bc97fba9d658382d07a90ecb93b842715ceda0f3500d5f17ae"} Jan 22 14:07:11 crc kubenswrapper[5017]: I0122 14:07:11.819184 5017 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:07:11 crc kubenswrapper[5017]: I0122 14:07:11.819505 5017 status_manager.go:851] "Failed to get status for pod" podUID="775ea46b-9a5d-41f7-8940-0cd040b95f58" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:07:12 crc kubenswrapper[5017]: I0122 14:07:12.831739 5017 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="fb493c9a477589b2f640df5b3af4a994e7a2c6dbe9b74354426a06fd3acb87a8" exitCode=0 Jan 22 14:07:12 crc kubenswrapper[5017]: I0122 14:07:12.831836 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"fb493c9a477589b2f640df5b3af4a994e7a2c6dbe9b74354426a06fd3acb87a8"} Jan 22 14:07:12 crc kubenswrapper[5017]: I0122 14:07:12.832013 5017 kubelet.go:1909] "Trying to delete pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d69195ba-e14d-4a7b-bb50-d2c0bbd0283e" Jan 22 14:07:12 crc kubenswrapper[5017]: I0122 14:07:12.832038 5017 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d69195ba-e14d-4a7b-bb50-d2c0bbd0283e" Jan 22 14:07:12 crc kubenswrapper[5017]: I0122 14:07:12.832467 5017 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:07:12 crc kubenswrapper[5017]: E0122 14:07:12.832526 5017 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.235:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:07:12 crc kubenswrapper[5017]: I0122 14:07:12.832751 5017 status_manager.go:851] "Failed to get status for pod" podUID="775ea46b-9a5d-41f7-8940-0cd040b95f58" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.235:6443: connect: connection refused" Jan 22 14:07:13 crc kubenswrapper[5017]: I0122 14:07:13.838684 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"96bf07c0dffaa16e60dbdd17d04b89a2ad4de810e8c34f823c6391b238439651"} Jan 22 14:07:13 crc kubenswrapper[5017]: I0122 14:07:13.839002 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"4a17c7f1469e83515a9ab6a1999e9b1977e812f36dd0913044638c96808f1c77"} Jan 22 14:07:14 crc kubenswrapper[5017]: I0122 14:07:14.848034 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"5615003e564b1e35ad031719c65f460f905662fd93dd31cff7c19474d8679171"} Jan 22 14:07:14 crc kubenswrapper[5017]: I0122 14:07:14.848414 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"bbe1217e770fea21a414c6bc0256a1bbbbfecab0e939e7b85d23c18da98ff10d"} Jan 22 14:07:14 crc kubenswrapper[5017]: I0122 14:07:14.848435 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"9a006bb00915f1652163a7a05d9b075fc02e65ac67382e15c0b5b0168c96d39f"} Jan 22 14:07:14 crc kubenswrapper[5017]: I0122 14:07:14.848591 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:07:14 crc kubenswrapper[5017]: I0122 14:07:14.848671 5017 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d69195ba-e14d-4a7b-bb50-d2c0bbd0283e" Jan 22 14:07:14 crc kubenswrapper[5017]: I0122 14:07:14.848699 5017 mirror_client.go:130] "Deleting a mirror pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d69195ba-e14d-4a7b-bb50-d2c0bbd0283e" Jan 22 14:07:16 crc kubenswrapper[5017]: I0122 14:07:16.276461 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:07:16 crc kubenswrapper[5017]: I0122 14:07:16.277201 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:07:16 crc kubenswrapper[5017]: I0122 14:07:16.281919 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:07:18 crc kubenswrapper[5017]: I0122 14:07:18.432314 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" podUID="ae146171-4a21-45b6-8484-66fbdb597a5e" containerName="oauth-openshift" containerID="cri-o://b782120a6e87582ef934563b24c110ef3a7ffd237fbdc1277b089e25711c846e" gracePeriod=15 Jan 22 14:07:18 crc kubenswrapper[5017]: I0122 14:07:18.872602 5017 generic.go:334] "Generic (PLEG): container finished" podID="ae146171-4a21-45b6-8484-66fbdb597a5e" containerID="b782120a6e87582ef934563b24c110ef3a7ffd237fbdc1277b089e25711c846e" exitCode=0 Jan 22 14:07:18 crc kubenswrapper[5017]: I0122 14:07:18.872724 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" event={"ID":"ae146171-4a21-45b6-8484-66fbdb597a5e","Type":"ContainerDied","Data":"b782120a6e87582ef934563b24c110ef3a7ffd237fbdc1277b089e25711c846e"} Jan 22 14:07:18 crc kubenswrapper[5017]: I0122 14:07:18.990561 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.044979 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2wksj\" (UniqueName: \"kubernetes.io/projected/ae146171-4a21-45b6-8484-66fbdb597a5e-kube-api-access-2wksj\") pod \"ae146171-4a21-45b6-8484-66fbdb597a5e\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.045077 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-service-ca\") pod \"ae146171-4a21-45b6-8484-66fbdb597a5e\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.045203 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-serving-cert\") pod \"ae146171-4a21-45b6-8484-66fbdb597a5e\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.046378 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-trusted-ca-bundle\") pod \"ae146171-4a21-45b6-8484-66fbdb597a5e\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.046439 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" 
(UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-template-login\") pod \"ae146171-4a21-45b6-8484-66fbdb597a5e\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.046489 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-template-error\") pod \"ae146171-4a21-45b6-8484-66fbdb597a5e\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.046553 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-template-provider-selection\") pod \"ae146171-4a21-45b6-8484-66fbdb597a5e\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.046813 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "ae146171-4a21-45b6-8484-66fbdb597a5e" (UID: "ae146171-4a21-45b6-8484-66fbdb597a5e"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.047660 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-router-certs\") pod \"ae146171-4a21-45b6-8484-66fbdb597a5e\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.047796 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-ocp-branding-template\") pod \"ae146171-4a21-45b6-8484-66fbdb597a5e\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.048270 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "ae146171-4a21-45b6-8484-66fbdb597a5e" (UID: "ae146171-4a21-45b6-8484-66fbdb597a5e"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.048353 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-audit-policies\") pod \"ae146171-4a21-45b6-8484-66fbdb597a5e\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.048388 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-session\") pod \"ae146171-4a21-45b6-8484-66fbdb597a5e\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.048435 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ae146171-4a21-45b6-8484-66fbdb597a5e-audit-dir\") pod \"ae146171-4a21-45b6-8484-66fbdb597a5e\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.048471 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-idp-0-file-data\") pod \"ae146171-4a21-45b6-8484-66fbdb597a5e\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.048510 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-cliconfig\") pod \"ae146171-4a21-45b6-8484-66fbdb597a5e\" (UID: \"ae146171-4a21-45b6-8484-66fbdb597a5e\") " Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.048849 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "ae146171-4a21-45b6-8484-66fbdb597a5e" (UID: "ae146171-4a21-45b6-8484-66fbdb597a5e"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.048823 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ae146171-4a21-45b6-8484-66fbdb597a5e-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "ae146171-4a21-45b6-8484-66fbdb597a5e" (UID: "ae146171-4a21-45b6-8484-66fbdb597a5e"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.049089 5017 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.049137 5017 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.049156 5017 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.049449 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "ae146171-4a21-45b6-8484-66fbdb597a5e" (UID: "ae146171-4a21-45b6-8484-66fbdb597a5e"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.054234 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "ae146171-4a21-45b6-8484-66fbdb597a5e" (UID: "ae146171-4a21-45b6-8484-66fbdb597a5e"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.054802 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "ae146171-4a21-45b6-8484-66fbdb597a5e" (UID: "ae146171-4a21-45b6-8484-66fbdb597a5e"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.055182 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "ae146171-4a21-45b6-8484-66fbdb597a5e" (UID: "ae146171-4a21-45b6-8484-66fbdb597a5e"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.059253 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "ae146171-4a21-45b6-8484-66fbdb597a5e" (UID: "ae146171-4a21-45b6-8484-66fbdb597a5e"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.062332 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "ae146171-4a21-45b6-8484-66fbdb597a5e" (UID: "ae146171-4a21-45b6-8484-66fbdb597a5e"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.062822 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "ae146171-4a21-45b6-8484-66fbdb597a5e" (UID: "ae146171-4a21-45b6-8484-66fbdb597a5e"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.063181 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "ae146171-4a21-45b6-8484-66fbdb597a5e" (UID: "ae146171-4a21-45b6-8484-66fbdb597a5e"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.063377 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "ae146171-4a21-45b6-8484-66fbdb597a5e" (UID: "ae146171-4a21-45b6-8484-66fbdb597a5e"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.063875 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae146171-4a21-45b6-8484-66fbdb597a5e-kube-api-access-2wksj" (OuterVolumeSpecName: "kube-api-access-2wksj") pod "ae146171-4a21-45b6-8484-66fbdb597a5e" (UID: "ae146171-4a21-45b6-8484-66fbdb597a5e"). InnerVolumeSpecName "kube-api-access-2wksj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.151008 5017 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.151065 5017 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.151089 5017 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.151107 5017 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.151157 5017 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ae146171-4a21-45b6-8484-66fbdb597a5e-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.151176 5017 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.151193 5017 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.151211 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2wksj\" (UniqueName: \"kubernetes.io/projected/ae146171-4a21-45b6-8484-66fbdb597a5e-kube-api-access-2wksj\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.151228 5017 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.151248 5017 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.151268 5017 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/ae146171-4a21-45b6-8484-66fbdb597a5e-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.873699 5017 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 
14:07:19.880866 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" event={"ID":"ae146171-4a21-45b6-8484-66fbdb597a5e","Type":"ContainerDied","Data":"0e7b0c03bb68c83a1e27510454b72ceed213abc2935420101ca8f4a2374112c5"} Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.880937 5017 scope.go:117] "RemoveContainer" containerID="b782120a6e87582ef934563b24c110ef3a7ffd237fbdc1277b089e25711c846e" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.881179 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7pt94" Jan 22 14:07:19 crc kubenswrapper[5017]: I0122 14:07:19.956570 5017 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="d9d4a325-f5ac-4418-8509-d81481be1d4c" Jan 22 14:07:20 crc kubenswrapper[5017]: I0122 14:07:20.783102 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:07:20 crc kubenswrapper[5017]: I0122 14:07:20.786739 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:07:20 crc kubenswrapper[5017]: I0122 14:07:20.887542 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:07:20 crc kubenswrapper[5017]: I0122 14:07:20.887592 5017 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d69195ba-e14d-4a7b-bb50-d2c0bbd0283e" Jan 22 14:07:20 crc kubenswrapper[5017]: I0122 14:07:20.887619 5017 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d69195ba-e14d-4a7b-bb50-d2c0bbd0283e" Jan 22 14:07:20 crc kubenswrapper[5017]: I0122 14:07:20.890921 5017 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="d9d4a325-f5ac-4418-8509-d81481be1d4c" Jan 22 14:07:20 crc kubenswrapper[5017]: I0122 14:07:20.892882 5017 status_manager.go:308] "Container readiness changed before pod has synced" pod="openshift-kube-apiserver/kube-apiserver-crc" containerID="cri-o://4a17c7f1469e83515a9ab6a1999e9b1977e812f36dd0913044638c96808f1c77" Jan 22 14:07:20 crc kubenswrapper[5017]: I0122 14:07:20.892911 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:07:21 crc kubenswrapper[5017]: I0122 14:07:21.709110 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:07:21 crc kubenswrapper[5017]: I0122 14:07:21.896871 5017 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d69195ba-e14d-4a7b-bb50-d2c0bbd0283e" Jan 22 14:07:21 crc kubenswrapper[5017]: I0122 14:07:21.896984 5017 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d69195ba-e14d-4a7b-bb50-d2c0bbd0283e" Jan 22 14:07:21 crc kubenswrapper[5017]: I0122 14:07:21.901798 5017 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" 
pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="d9d4a325-f5ac-4418-8509-d81481be1d4c" Jan 22 14:07:29 crc kubenswrapper[5017]: I0122 14:07:29.197760 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 22 14:07:29 crc kubenswrapper[5017]: I0122 14:07:29.408872 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 22 14:07:29 crc kubenswrapper[5017]: I0122 14:07:29.569582 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 22 14:07:29 crc kubenswrapper[5017]: I0122 14:07:29.760356 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 22 14:07:30 crc kubenswrapper[5017]: I0122 14:07:30.003095 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 22 14:07:30 crc kubenswrapper[5017]: I0122 14:07:30.039256 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 22 14:07:30 crc kubenswrapper[5017]: I0122 14:07:30.333950 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 22 14:07:30 crc kubenswrapper[5017]: I0122 14:07:30.413177 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 22 14:07:30 crc kubenswrapper[5017]: I0122 14:07:30.483354 5017 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 22 14:07:30 crc kubenswrapper[5017]: I0122 14:07:30.485854 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 22 14:07:31 crc kubenswrapper[5017]: I0122 14:07:31.082796 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 22 14:07:31 crc kubenswrapper[5017]: I0122 14:07:31.082873 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 22 14:07:31 crc kubenswrapper[5017]: I0122 14:07:31.320034 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 22 14:07:31 crc kubenswrapper[5017]: I0122 14:07:31.338950 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 22 14:07:31 crc kubenswrapper[5017]: I0122 14:07:31.397007 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 22 14:07:31 crc kubenswrapper[5017]: I0122 14:07:31.397170 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 22 14:07:31 crc kubenswrapper[5017]: I0122 14:07:31.405065 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 22 14:07:31 crc kubenswrapper[5017]: I0122 14:07:31.435449 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 22 14:07:31 crc kubenswrapper[5017]: 
I0122 14:07:31.543996 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 22 14:07:31 crc kubenswrapper[5017]: I0122 14:07:31.554592 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 22 14:07:31 crc kubenswrapper[5017]: I0122 14:07:31.656365 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 22 14:07:31 crc kubenswrapper[5017]: I0122 14:07:31.672826 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 22 14:07:31 crc kubenswrapper[5017]: I0122 14:07:31.731660 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 22 14:07:31 crc kubenswrapper[5017]: I0122 14:07:31.904631 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 22 14:07:31 crc kubenswrapper[5017]: I0122 14:07:31.927613 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 22 14:07:32 crc kubenswrapper[5017]: I0122 14:07:32.003333 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 22 14:07:32 crc kubenswrapper[5017]: I0122 14:07:32.030344 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 22 14:07:32 crc kubenswrapper[5017]: I0122 14:07:32.251971 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 22 14:07:32 crc kubenswrapper[5017]: I0122 14:07:32.386948 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 22 14:07:32 crc kubenswrapper[5017]: I0122 14:07:32.574812 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 22 14:07:32 crc kubenswrapper[5017]: I0122 14:07:32.679396 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 22 14:07:32 crc kubenswrapper[5017]: I0122 14:07:32.688283 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 22 14:07:32 crc kubenswrapper[5017]: I0122 14:07:32.726782 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 22 14:07:32 crc kubenswrapper[5017]: I0122 14:07:32.817929 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 22 14:07:32 crc kubenswrapper[5017]: I0122 14:07:32.901709 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 22 14:07:32 crc kubenswrapper[5017]: I0122 14:07:32.918554 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 22 14:07:32 crc kubenswrapper[5017]: I0122 14:07:32.942430 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 22 14:07:32 crc kubenswrapper[5017]: I0122 
14:07:32.994500 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 22 14:07:32 crc kubenswrapper[5017]: I0122 14:07:32.995383 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 22 14:07:33 crc kubenswrapper[5017]: I0122 14:07:33.109874 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 22 14:07:33 crc kubenswrapper[5017]: I0122 14:07:33.347518 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 22 14:07:33 crc kubenswrapper[5017]: I0122 14:07:33.349175 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 22 14:07:33 crc kubenswrapper[5017]: I0122 14:07:33.589196 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 22 14:07:33 crc kubenswrapper[5017]: I0122 14:07:33.617717 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 22 14:07:33 crc kubenswrapper[5017]: I0122 14:07:33.630939 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 22 14:07:33 crc kubenswrapper[5017]: I0122 14:07:33.659842 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 22 14:07:33 crc kubenswrapper[5017]: I0122 14:07:33.666362 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 22 14:07:34 crc kubenswrapper[5017]: I0122 14:07:34.041376 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 22 14:07:34 crc kubenswrapper[5017]: I0122 14:07:34.097097 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 22 14:07:34 crc kubenswrapper[5017]: I0122 14:07:34.259990 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 22 14:07:34 crc kubenswrapper[5017]: I0122 14:07:34.338311 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 22 14:07:34 crc kubenswrapper[5017]: I0122 14:07:34.406956 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 22 14:07:34 crc kubenswrapper[5017]: I0122 14:07:34.471272 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 22 14:07:34 crc kubenswrapper[5017]: I0122 14:07:34.472983 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 22 14:07:34 crc kubenswrapper[5017]: I0122 14:07:34.516014 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 22 14:07:34 crc kubenswrapper[5017]: I0122 14:07:34.611378 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 22 14:07:34 crc kubenswrapper[5017]: I0122 14:07:34.621073 5017 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 22 14:07:34 crc kubenswrapper[5017]: I0122 14:07:34.724068 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 22 14:07:34 crc kubenswrapper[5017]: I0122 14:07:34.786724 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 22 14:07:34 crc kubenswrapper[5017]: I0122 14:07:34.847482 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.088885 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.111800 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.150203 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.203534 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.219086 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.220405 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.222488 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.257845 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.310021 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.355423 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.462385 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.479867 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.486012 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.564530 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.571366 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.626480 
5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.701170 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.701249 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.702068 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.728796 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.733521 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.791545 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.803001 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.859330 5017 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.863783 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-558db77b4-7pt94"] Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.863849 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.870754 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.880753 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=16.880738884 podStartE2EDuration="16.880738884s" podCreationTimestamp="2026-01-22 14:07:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:07:35.879152147 +0000 UTC m=+270.871614015" watchObservedRunningTime="2026-01-22 14:07:35.880738884 +0000 UTC m=+270.873200742" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.961271 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 22 14:07:35 crc kubenswrapper[5017]: I0122 14:07:35.966054 5017 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 22 14:07:36 crc kubenswrapper[5017]: I0122 14:07:36.049157 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 22 14:07:36 crc kubenswrapper[5017]: I0122 14:07:36.049869 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 22 
14:07:36 crc kubenswrapper[5017]: I0122 14:07:36.056735 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 22 14:07:36 crc kubenswrapper[5017]: I0122 14:07:36.079426 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 22 14:07:36 crc kubenswrapper[5017]: I0122 14:07:36.115216 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 22 14:07:36 crc kubenswrapper[5017]: I0122 14:07:36.119686 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 22 14:07:36 crc kubenswrapper[5017]: I0122 14:07:36.254494 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 22 14:07:36 crc kubenswrapper[5017]: I0122 14:07:36.274639 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 22 14:07:36 crc kubenswrapper[5017]: I0122 14:07:36.282186 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 22 14:07:36 crc kubenswrapper[5017]: I0122 14:07:36.298830 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 22 14:07:36 crc kubenswrapper[5017]: I0122 14:07:36.347512 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 22 14:07:36 crc kubenswrapper[5017]: I0122 14:07:36.421625 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 22 14:07:36 crc kubenswrapper[5017]: I0122 14:07:36.445033 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 22 14:07:36 crc kubenswrapper[5017]: I0122 14:07:36.469254 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 22 14:07:36 crc kubenswrapper[5017]: I0122 14:07:36.480004 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 22 14:07:36 crc kubenswrapper[5017]: I0122 14:07:36.561847 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 22 14:07:36 crc kubenswrapper[5017]: I0122 14:07:36.652032 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 22 14:07:36 crc kubenswrapper[5017]: I0122 14:07:36.758675 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 22 14:07:36 crc kubenswrapper[5017]: I0122 14:07:36.784270 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 22 14:07:36 crc kubenswrapper[5017]: I0122 14:07:36.797957 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 22 14:07:36 crc kubenswrapper[5017]: I0122 14:07:36.846820 5017 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-dns-operator"/"metrics-tls" Jan 22 14:07:36 crc kubenswrapper[5017]: I0122 14:07:36.932764 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 22 14:07:36 crc kubenswrapper[5017]: I0122 14:07:36.951011 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 22 14:07:37 crc kubenswrapper[5017]: I0122 14:07:37.031207 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 22 14:07:37 crc kubenswrapper[5017]: I0122 14:07:37.055797 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 22 14:07:37 crc kubenswrapper[5017]: I0122 14:07:37.078309 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 22 14:07:37 crc kubenswrapper[5017]: I0122 14:07:37.100790 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 22 14:07:37 crc kubenswrapper[5017]: I0122 14:07:37.108297 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 22 14:07:37 crc kubenswrapper[5017]: I0122 14:07:37.109422 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 22 14:07:37 crc kubenswrapper[5017]: I0122 14:07:37.252684 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 22 14:07:37 crc kubenswrapper[5017]: I0122 14:07:37.263467 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae146171-4a21-45b6-8484-66fbdb597a5e" path="/var/lib/kubelet/pods/ae146171-4a21-45b6-8484-66fbdb597a5e/volumes" Jan 22 14:07:37 crc kubenswrapper[5017]: I0122 14:07:37.368821 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 22 14:07:37 crc kubenswrapper[5017]: I0122 14:07:37.461820 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 22 14:07:37 crc kubenswrapper[5017]: I0122 14:07:37.594020 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 22 14:07:37 crc kubenswrapper[5017]: I0122 14:07:37.693034 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 22 14:07:37 crc kubenswrapper[5017]: I0122 14:07:37.712778 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 22 14:07:37 crc kubenswrapper[5017]: I0122 14:07:37.712880 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 22 14:07:37 crc kubenswrapper[5017]: I0122 14:07:37.833536 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 22 14:07:37 crc kubenswrapper[5017]: I0122 14:07:37.842240 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 22 14:07:37 crc kubenswrapper[5017]: I0122 
14:07:37.903027 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 22 14:07:37 crc kubenswrapper[5017]: I0122 14:07:37.910036 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 22 14:07:37 crc kubenswrapper[5017]: I0122 14:07:37.955137 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 22 14:07:38 crc kubenswrapper[5017]: I0122 14:07:38.045980 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 22 14:07:38 crc kubenswrapper[5017]: I0122 14:07:38.201147 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 22 14:07:38 crc kubenswrapper[5017]: I0122 14:07:38.204392 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 22 14:07:38 crc kubenswrapper[5017]: I0122 14:07:38.259629 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 22 14:07:38 crc kubenswrapper[5017]: I0122 14:07:38.271962 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 22 14:07:38 crc kubenswrapper[5017]: I0122 14:07:38.359776 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 22 14:07:38 crc kubenswrapper[5017]: I0122 14:07:38.387167 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 22 14:07:38 crc kubenswrapper[5017]: I0122 14:07:38.438001 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 22 14:07:38 crc kubenswrapper[5017]: I0122 14:07:38.479406 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 22 14:07:38 crc kubenswrapper[5017]: I0122 14:07:38.479846 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 22 14:07:38 crc kubenswrapper[5017]: I0122 14:07:38.489702 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 22 14:07:38 crc kubenswrapper[5017]: I0122 14:07:38.617979 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 22 14:07:38 crc kubenswrapper[5017]: I0122 14:07:38.661680 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 22 14:07:38 crc kubenswrapper[5017]: I0122 14:07:38.860763 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 22 14:07:38 crc kubenswrapper[5017]: I0122 14:07:38.915446 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 22 14:07:38 crc kubenswrapper[5017]: I0122 14:07:38.988961 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 22 14:07:39 crc 
kubenswrapper[5017]: I0122 14:07:39.091901 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 22 14:07:39 crc kubenswrapper[5017]: I0122 14:07:39.109462 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 22 14:07:39 crc kubenswrapper[5017]: I0122 14:07:39.178444 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 22 14:07:39 crc kubenswrapper[5017]: I0122 14:07:39.260087 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 22 14:07:39 crc kubenswrapper[5017]: I0122 14:07:39.283783 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 22 14:07:39 crc kubenswrapper[5017]: I0122 14:07:39.328594 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 22 14:07:39 crc kubenswrapper[5017]: I0122 14:07:39.471334 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 22 14:07:39 crc kubenswrapper[5017]: I0122 14:07:39.686102 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 22 14:07:39 crc kubenswrapper[5017]: I0122 14:07:39.708620 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 22 14:07:39 crc kubenswrapper[5017]: I0122 14:07:39.790465 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 22 14:07:39 crc kubenswrapper[5017]: I0122 14:07:39.877177 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 22 14:07:39 crc kubenswrapper[5017]: I0122 14:07:39.879004 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 22 14:07:39 crc kubenswrapper[5017]: I0122 14:07:39.924618 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 22 14:07:39 crc kubenswrapper[5017]: I0122 14:07:39.932746 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 22 14:07:39 crc kubenswrapper[5017]: I0122 14:07:39.952029 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 22 14:07:40 crc kubenswrapper[5017]: I0122 14:07:40.046095 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 22 14:07:40 crc kubenswrapper[5017]: I0122 14:07:40.115731 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 22 14:07:40 crc kubenswrapper[5017]: I0122 14:07:40.148106 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 22 14:07:40 crc kubenswrapper[5017]: I0122 14:07:40.237483 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 22 14:07:40 crc kubenswrapper[5017]: I0122 14:07:40.271863 5017 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 22 14:07:40 crc kubenswrapper[5017]: I0122 14:07:40.273395 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 22 14:07:40 crc kubenswrapper[5017]: I0122 14:07:40.296895 5017 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 22 14:07:40 crc kubenswrapper[5017]: I0122 14:07:40.309209 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 22 14:07:40 crc kubenswrapper[5017]: I0122 14:07:40.379311 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 22 14:07:40 crc kubenswrapper[5017]: I0122 14:07:40.396603 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 22 14:07:40 crc kubenswrapper[5017]: I0122 14:07:40.412040 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 22 14:07:40 crc kubenswrapper[5017]: I0122 14:07:40.472559 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 22 14:07:40 crc kubenswrapper[5017]: I0122 14:07:40.509924 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 22 14:07:40 crc kubenswrapper[5017]: I0122 14:07:40.642399 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 22 14:07:40 crc kubenswrapper[5017]: I0122 14:07:40.654148 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 22 14:07:40 crc kubenswrapper[5017]: I0122 14:07:40.734417 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 22 14:07:40 crc kubenswrapper[5017]: I0122 14:07:40.745220 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 22 14:07:40 crc kubenswrapper[5017]: I0122 14:07:40.826253 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 22 14:07:40 crc kubenswrapper[5017]: I0122 14:07:40.960003 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 22 14:07:41 crc kubenswrapper[5017]: I0122 14:07:41.060526 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 22 14:07:41 crc kubenswrapper[5017]: I0122 14:07:41.061516 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 22 14:07:41 crc kubenswrapper[5017]: I0122 14:07:41.129625 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 22 14:07:41 crc kubenswrapper[5017]: I0122 14:07:41.301930 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 22 14:07:41 crc kubenswrapper[5017]: I0122 14:07:41.335681 
5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 22 14:07:41 crc kubenswrapper[5017]: I0122 14:07:41.407458 5017 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 22 14:07:41 crc kubenswrapper[5017]: I0122 14:07:41.413050 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 22 14:07:41 crc kubenswrapper[5017]: I0122 14:07:41.510606 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 22 14:07:41 crc kubenswrapper[5017]: I0122 14:07:41.516027 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 22 14:07:41 crc kubenswrapper[5017]: I0122 14:07:41.543613 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 22 14:07:41 crc kubenswrapper[5017]: I0122 14:07:41.567201 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 22 14:07:41 crc kubenswrapper[5017]: I0122 14:07:41.604652 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 22 14:07:41 crc kubenswrapper[5017]: I0122 14:07:41.604878 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 22 14:07:41 crc kubenswrapper[5017]: I0122 14:07:41.682800 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 22 14:07:41 crc kubenswrapper[5017]: I0122 14:07:41.755601 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 22 14:07:41 crc kubenswrapper[5017]: I0122 14:07:41.772532 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 22 14:07:41 crc kubenswrapper[5017]: I0122 14:07:41.831807 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 22 14:07:41 crc kubenswrapper[5017]: I0122 14:07:41.840059 5017 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 22 14:07:41 crc kubenswrapper[5017]: I0122 14:07:41.880314 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 22 14:07:42 crc kubenswrapper[5017]: I0122 14:07:42.072812 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 22 14:07:42 crc kubenswrapper[5017]: I0122 14:07:42.135983 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 22 14:07:42 crc kubenswrapper[5017]: I0122 14:07:42.163157 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 22 14:07:42 crc kubenswrapper[5017]: I0122 14:07:42.164515 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 22 14:07:42 crc 
kubenswrapper[5017]: I0122 14:07:42.261191 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 22 14:07:42 crc kubenswrapper[5017]: I0122 14:07:42.290887 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 22 14:07:42 crc kubenswrapper[5017]: I0122 14:07:42.304184 5017 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 22 14:07:42 crc kubenswrapper[5017]: I0122 14:07:42.304430 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://567991fb70d2010490e7a154cec40e85f739b506c7eaaa790459201931f99fb7" gracePeriod=5 Jan 22 14:07:42 crc kubenswrapper[5017]: I0122 14:07:42.324859 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 22 14:07:42 crc kubenswrapper[5017]: I0122 14:07:42.359854 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 22 14:07:42 crc kubenswrapper[5017]: I0122 14:07:42.412711 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 22 14:07:42 crc kubenswrapper[5017]: I0122 14:07:42.548023 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 22 14:07:42 crc kubenswrapper[5017]: I0122 14:07:42.586302 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 22 14:07:42 crc kubenswrapper[5017]: I0122 14:07:42.708017 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 22 14:07:42 crc kubenswrapper[5017]: I0122 14:07:42.756444 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 22 14:07:42 crc kubenswrapper[5017]: I0122 14:07:42.785770 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 22 14:07:42 crc kubenswrapper[5017]: I0122 14:07:42.789242 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 22 14:07:42 crc kubenswrapper[5017]: I0122 14:07:42.838244 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 22 14:07:42 crc kubenswrapper[5017]: I0122 14:07:42.972488 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 22 14:07:42 crc kubenswrapper[5017]: I0122 14:07:42.999712 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.172822 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.357153 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.378607 5017 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5"] Jan 22 14:07:43 crc kubenswrapper[5017]: E0122 14:07:43.378873 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae146171-4a21-45b6-8484-66fbdb597a5e" containerName="oauth-openshift" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.378888 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae146171-4a21-45b6-8484-66fbdb597a5e" containerName="oauth-openshift" Jan 22 14:07:43 crc kubenswrapper[5017]: E0122 14:07:43.378903 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.378910 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 22 14:07:43 crc kubenswrapper[5017]: E0122 14:07:43.378927 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="775ea46b-9a5d-41f7-8940-0cd040b95f58" containerName="installer" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.378935 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="775ea46b-9a5d-41f7-8940-0cd040b95f58" containerName="installer" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.379049 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae146171-4a21-45b6-8484-66fbdb597a5e" containerName="oauth-openshift" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.379063 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="775ea46b-9a5d-41f7-8940-0cd040b95f58" containerName="installer" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.379073 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.379560 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.381637 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.382029 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.382090 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.382105 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.382133 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.382332 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.382824 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.384453 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.384601 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.394778 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.394815 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.395292 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.397943 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.398925 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5"] Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.406178 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.411369 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.444313 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-user-template-login\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " 
pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.444357 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/069b753a-61e6-4603-9faa-4d92a521f978-audit-policies\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.444379 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.444407 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.444428 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-system-service-ca\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.444446 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-system-router-certs\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.444469 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.444490 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rd4x4\" (UniqueName: \"kubernetes.io/projected/069b753a-61e6-4603-9faa-4d92a521f978-kube-api-access-rd4x4\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.444515 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.444534 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.444555 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-system-session\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.444572 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/069b753a-61e6-4603-9faa-4d92a521f978-audit-dir\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.444595 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-user-template-error\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.444609 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.461187 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.545722 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-user-template-error\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.545781 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " 
pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.545812 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-user-template-login\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.545836 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/069b753a-61e6-4603-9faa-4d92a521f978-audit-policies\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.545856 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.545885 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.545904 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-system-service-ca\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.545919 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-system-router-certs\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.545944 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.545964 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rd4x4\" (UniqueName: \"kubernetes.io/projected/069b753a-61e6-4603-9faa-4d92a521f978-kube-api-access-rd4x4\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " 
pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.545990 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.546010 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.546025 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-system-session\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.546045 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/069b753a-61e6-4603-9faa-4d92a521f978-audit-dir\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.546137 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/069b753a-61e6-4603-9faa-4d92a521f978-audit-dir\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.546693 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/069b753a-61e6-4603-9faa-4d92a521f978-audit-policies\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.546771 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.547285 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-system-service-ca\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 
14:07:43.548144 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.552821 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-user-template-error\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.553185 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.553519 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-user-template-login\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.554495 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.555489 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-system-session\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.555953 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.556901 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.563445 5017 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rd4x4\" (UniqueName: \"kubernetes.io/projected/069b753a-61e6-4603-9faa-4d92a521f978-kube-api-access-rd4x4\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.570687 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/069b753a-61e6-4603-9faa-4d92a521f978-v4-0-config-system-router-certs\") pod \"oauth-openshift-5fbdbc85dc-w2wv5\" (UID: \"069b753a-61e6-4603-9faa-4d92a521f978\") " pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.697158 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.774280 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.869166 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.919304 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 22 14:07:43 crc kubenswrapper[5017]: I0122 14:07:43.935454 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 22 14:07:44 crc kubenswrapper[5017]: I0122 14:07:44.014689 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 22 14:07:44 crc kubenswrapper[5017]: I0122 14:07:44.106232 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5"] Jan 22 14:07:44 crc kubenswrapper[5017]: I0122 14:07:44.126534 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 22 14:07:44 crc kubenswrapper[5017]: I0122 14:07:44.137770 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 22 14:07:44 crc kubenswrapper[5017]: I0122 14:07:44.318876 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 22 14:07:44 crc kubenswrapper[5017]: I0122 14:07:44.725172 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 22 14:07:45 crc kubenswrapper[5017]: I0122 14:07:45.019858 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" event={"ID":"069b753a-61e6-4603-9faa-4d92a521f978","Type":"ContainerStarted","Data":"16abfcb8dc1b01991f13817e0c1415e8d8282cd50f15f843e32eff9d1609b40b"} Jan 22 14:07:45 crc kubenswrapper[5017]: I0122 14:07:45.019922 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" 
event={"ID":"069b753a-61e6-4603-9faa-4d92a521f978","Type":"ContainerStarted","Data":"ee67b4527dccfa33010cc65526bc96a29d4c109325e726c685718e8be78be7fc"} Jan 22 14:07:45 crc kubenswrapper[5017]: I0122 14:07:45.024007 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:45 crc kubenswrapper[5017]: I0122 14:07:45.027999 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" Jan 22 14:07:45 crc kubenswrapper[5017]: I0122 14:07:45.048854 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 22 14:07:45 crc kubenswrapper[5017]: I0122 14:07:45.071200 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-5fbdbc85dc-w2wv5" podStartSLOduration=52.071184794 podStartE2EDuration="52.071184794s" podCreationTimestamp="2026-01-22 14:06:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:07:45.04664449 +0000 UTC m=+280.039106388" watchObservedRunningTime="2026-01-22 14:07:45.071184794 +0000 UTC m=+280.063646652" Jan 22 14:07:45 crc kubenswrapper[5017]: I0122 14:07:45.203069 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 22 14:07:45 crc kubenswrapper[5017]: I0122 14:07:45.221058 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 22 14:07:45 crc kubenswrapper[5017]: I0122 14:07:45.276859 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 22 14:07:45 crc kubenswrapper[5017]: I0122 14:07:45.448217 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 22 14:07:45 crc kubenswrapper[5017]: I0122 14:07:45.459195 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 22 14:07:46 crc kubenswrapper[5017]: I0122 14:07:46.156523 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 22 14:07:46 crc kubenswrapper[5017]: I0122 14:07:46.386669 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 22 14:07:47 crc kubenswrapper[5017]: I0122 14:07:47.456276 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 22 14:07:47 crc kubenswrapper[5017]: I0122 14:07:47.456678 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:07:47 crc kubenswrapper[5017]: I0122 14:07:47.606724 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 22 14:07:47 crc kubenswrapper[5017]: I0122 14:07:47.606833 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 22 14:07:47 crc kubenswrapper[5017]: I0122 14:07:47.606891 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 22 14:07:47 crc kubenswrapper[5017]: I0122 14:07:47.606946 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 22 14:07:47 crc kubenswrapper[5017]: I0122 14:07:47.606981 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 22 14:07:47 crc kubenswrapper[5017]: I0122 14:07:47.607353 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:07:47 crc kubenswrapper[5017]: I0122 14:07:47.607501 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:07:47 crc kubenswrapper[5017]: I0122 14:07:47.607706 5017 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:47 crc kubenswrapper[5017]: I0122 14:07:47.607744 5017 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:47 crc kubenswrapper[5017]: I0122 14:07:47.607807 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:07:47 crc kubenswrapper[5017]: I0122 14:07:47.607859 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:07:47 crc kubenswrapper[5017]: I0122 14:07:47.616892 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:07:47 crc kubenswrapper[5017]: I0122 14:07:47.709026 5017 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:47 crc kubenswrapper[5017]: I0122 14:07:47.709058 5017 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:47 crc kubenswrapper[5017]: I0122 14:07:47.709068 5017 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:47 crc kubenswrapper[5017]: I0122 14:07:47.864069 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 22 14:07:48 crc kubenswrapper[5017]: I0122 14:07:48.034529 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 22 14:07:48 crc kubenswrapper[5017]: I0122 14:07:48.034586 5017 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="567991fb70d2010490e7a154cec40e85f739b506c7eaaa790459201931f99fb7" exitCode=137 Jan 22 14:07:48 crc kubenswrapper[5017]: I0122 14:07:48.034637 5017 scope.go:117] "RemoveContainer" containerID="567991fb70d2010490e7a154cec40e85f739b506c7eaaa790459201931f99fb7" Jan 22 14:07:48 crc kubenswrapper[5017]: I0122 14:07:48.034680 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:07:48 crc kubenswrapper[5017]: I0122 14:07:48.055043 5017 scope.go:117] "RemoveContainer" containerID="567991fb70d2010490e7a154cec40e85f739b506c7eaaa790459201931f99fb7" Jan 22 14:07:48 crc kubenswrapper[5017]: E0122 14:07:48.056187 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"567991fb70d2010490e7a154cec40e85f739b506c7eaaa790459201931f99fb7\": container with ID starting with 567991fb70d2010490e7a154cec40e85f739b506c7eaaa790459201931f99fb7 not found: ID does not exist" containerID="567991fb70d2010490e7a154cec40e85f739b506c7eaaa790459201931f99fb7" Jan 22 14:07:48 crc kubenswrapper[5017]: I0122 14:07:48.056218 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"567991fb70d2010490e7a154cec40e85f739b506c7eaaa790459201931f99fb7"} err="failed to get container status \"567991fb70d2010490e7a154cec40e85f739b506c7eaaa790459201931f99fb7\": rpc error: code = NotFound desc = could not find container \"567991fb70d2010490e7a154cec40e85f739b506c7eaaa790459201931f99fb7\": container with ID starting with 567991fb70d2010490e7a154cec40e85f739b506c7eaaa790459201931f99fb7 not found: ID does not exist" Jan 22 14:07:49 crc kubenswrapper[5017]: I0122 14:07:49.264255 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Jan 22 14:08:01 crc kubenswrapper[5017]: I0122 14:08:01.106525 5017 generic.go:334] "Generic (PLEG): container finished" podID="faf7a532-753d-4862-812c-3ef174c75f81" containerID="f6da17dc2842694a94757d84e60b90264ef8ccdbc285d97eadd9687b3ff219b9" exitCode=0 Jan 22 14:08:01 crc kubenswrapper[5017]: I0122 14:08:01.106641 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" event={"ID":"faf7a532-753d-4862-812c-3ef174c75f81","Type":"ContainerDied","Data":"f6da17dc2842694a94757d84e60b90264ef8ccdbc285d97eadd9687b3ff219b9"} Jan 22 14:08:01 crc kubenswrapper[5017]: I0122 14:08:01.107485 5017 scope.go:117] "RemoveContainer" containerID="f6da17dc2842694a94757d84e60b90264ef8ccdbc285d97eadd9687b3ff219b9" Jan 22 14:08:02 crc kubenswrapper[5017]: I0122 14:08:02.113592 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" event={"ID":"faf7a532-753d-4862-812c-3ef174c75f81","Type":"ContainerStarted","Data":"f30defb7d26ec58b03741527c15da877669edfaba7ffef57fdcdc7a92eea9529"} Jan 22 14:08:02 crc kubenswrapper[5017]: I0122 14:08:02.114149 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" Jan 22 14:08:02 crc kubenswrapper[5017]: I0122 14:08:02.115973 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" Jan 22 14:08:04 crc kubenswrapper[5017]: I0122 14:08:04.099773 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 22 14:08:05 crc kubenswrapper[5017]: I0122 14:08:05.140915 5017 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Jan 22 14:08:18 crc kubenswrapper[5017]: I0122 14:08:18.046395 5017 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 22 14:09:01 crc kubenswrapper[5017]: I0122 14:09:01.860431 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-rl6fc"] Jan 22 14:09:01 crc kubenswrapper[5017]: I0122 14:09:01.861884 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:01 crc kubenswrapper[5017]: I0122 14:09:01.881487 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-rl6fc"] Jan 22 14:09:01 crc kubenswrapper[5017]: I0122 14:09:01.985626 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/541f3c17-7361-4c0d-ac89-9a86926fd1e3-trusted-ca\") pod \"image-registry-66df7c8f76-rl6fc\" (UID: \"541f3c17-7361-4c0d-ac89-9a86926fd1e3\") " pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:01 crc kubenswrapper[5017]: I0122 14:09:01.985667 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9v6jb\" (UniqueName: \"kubernetes.io/projected/541f3c17-7361-4c0d-ac89-9a86926fd1e3-kube-api-access-9v6jb\") pod \"image-registry-66df7c8f76-rl6fc\" (UID: \"541f3c17-7361-4c0d-ac89-9a86926fd1e3\") " pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:01 crc kubenswrapper[5017]: I0122 14:09:01.985712 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-rl6fc\" (UID: \"541f3c17-7361-4c0d-ac89-9a86926fd1e3\") " pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:01 crc kubenswrapper[5017]: I0122 14:09:01.985735 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/541f3c17-7361-4c0d-ac89-9a86926fd1e3-registry-certificates\") pod \"image-registry-66df7c8f76-rl6fc\" (UID: \"541f3c17-7361-4c0d-ac89-9a86926fd1e3\") " pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:01 crc kubenswrapper[5017]: I0122 14:09:01.985761 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/541f3c17-7361-4c0d-ac89-9a86926fd1e3-ca-trust-extracted\") pod \"image-registry-66df7c8f76-rl6fc\" (UID: \"541f3c17-7361-4c0d-ac89-9a86926fd1e3\") " pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:01 crc kubenswrapper[5017]: I0122 14:09:01.985783 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/541f3c17-7361-4c0d-ac89-9a86926fd1e3-registry-tls\") pod \"image-registry-66df7c8f76-rl6fc\" (UID: \"541f3c17-7361-4c0d-ac89-9a86926fd1e3\") " pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:01 crc kubenswrapper[5017]: I0122 14:09:01.985805 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: 
\"kubernetes.io/secret/541f3c17-7361-4c0d-ac89-9a86926fd1e3-installation-pull-secrets\") pod \"image-registry-66df7c8f76-rl6fc\" (UID: \"541f3c17-7361-4c0d-ac89-9a86926fd1e3\") " pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:01 crc kubenswrapper[5017]: I0122 14:09:01.985935 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/541f3c17-7361-4c0d-ac89-9a86926fd1e3-bound-sa-token\") pod \"image-registry-66df7c8f76-rl6fc\" (UID: \"541f3c17-7361-4c0d-ac89-9a86926fd1e3\") " pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:02 crc kubenswrapper[5017]: I0122 14:09:02.010004 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-rl6fc\" (UID: \"541f3c17-7361-4c0d-ac89-9a86926fd1e3\") " pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:02 crc kubenswrapper[5017]: I0122 14:09:02.088081 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/541f3c17-7361-4c0d-ac89-9a86926fd1e3-registry-certificates\") pod \"image-registry-66df7c8f76-rl6fc\" (UID: \"541f3c17-7361-4c0d-ac89-9a86926fd1e3\") " pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:02 crc kubenswrapper[5017]: I0122 14:09:02.088169 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/541f3c17-7361-4c0d-ac89-9a86926fd1e3-ca-trust-extracted\") pod \"image-registry-66df7c8f76-rl6fc\" (UID: \"541f3c17-7361-4c0d-ac89-9a86926fd1e3\") " pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:02 crc kubenswrapper[5017]: I0122 14:09:02.088208 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/541f3c17-7361-4c0d-ac89-9a86926fd1e3-registry-tls\") pod \"image-registry-66df7c8f76-rl6fc\" (UID: \"541f3c17-7361-4c0d-ac89-9a86926fd1e3\") " pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:02 crc kubenswrapper[5017]: I0122 14:09:02.088241 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/541f3c17-7361-4c0d-ac89-9a86926fd1e3-installation-pull-secrets\") pod \"image-registry-66df7c8f76-rl6fc\" (UID: \"541f3c17-7361-4c0d-ac89-9a86926fd1e3\") " pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:02 crc kubenswrapper[5017]: I0122 14:09:02.088267 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/541f3c17-7361-4c0d-ac89-9a86926fd1e3-bound-sa-token\") pod \"image-registry-66df7c8f76-rl6fc\" (UID: \"541f3c17-7361-4c0d-ac89-9a86926fd1e3\") " pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:02 crc kubenswrapper[5017]: I0122 14:09:02.088317 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/541f3c17-7361-4c0d-ac89-9a86926fd1e3-trusted-ca\") pod \"image-registry-66df7c8f76-rl6fc\" (UID: \"541f3c17-7361-4c0d-ac89-9a86926fd1e3\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:02 crc kubenswrapper[5017]: I0122 14:09:02.088355 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9v6jb\" (UniqueName: \"kubernetes.io/projected/541f3c17-7361-4c0d-ac89-9a86926fd1e3-kube-api-access-9v6jb\") pod \"image-registry-66df7c8f76-rl6fc\" (UID: \"541f3c17-7361-4c0d-ac89-9a86926fd1e3\") " pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:02 crc kubenswrapper[5017]: I0122 14:09:02.088802 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/541f3c17-7361-4c0d-ac89-9a86926fd1e3-ca-trust-extracted\") pod \"image-registry-66df7c8f76-rl6fc\" (UID: \"541f3c17-7361-4c0d-ac89-9a86926fd1e3\") " pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:02 crc kubenswrapper[5017]: I0122 14:09:02.089624 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/541f3c17-7361-4c0d-ac89-9a86926fd1e3-trusted-ca\") pod \"image-registry-66df7c8f76-rl6fc\" (UID: \"541f3c17-7361-4c0d-ac89-9a86926fd1e3\") " pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:02 crc kubenswrapper[5017]: I0122 14:09:02.089796 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/541f3c17-7361-4c0d-ac89-9a86926fd1e3-registry-certificates\") pod \"image-registry-66df7c8f76-rl6fc\" (UID: \"541f3c17-7361-4c0d-ac89-9a86926fd1e3\") " pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:02 crc kubenswrapper[5017]: I0122 14:09:02.097371 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/541f3c17-7361-4c0d-ac89-9a86926fd1e3-installation-pull-secrets\") pod \"image-registry-66df7c8f76-rl6fc\" (UID: \"541f3c17-7361-4c0d-ac89-9a86926fd1e3\") " pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:02 crc kubenswrapper[5017]: I0122 14:09:02.097391 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/541f3c17-7361-4c0d-ac89-9a86926fd1e3-registry-tls\") pod \"image-registry-66df7c8f76-rl6fc\" (UID: \"541f3c17-7361-4c0d-ac89-9a86926fd1e3\") " pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:02 crc kubenswrapper[5017]: I0122 14:09:02.105847 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9v6jb\" (UniqueName: \"kubernetes.io/projected/541f3c17-7361-4c0d-ac89-9a86926fd1e3-kube-api-access-9v6jb\") pod \"image-registry-66df7c8f76-rl6fc\" (UID: \"541f3c17-7361-4c0d-ac89-9a86926fd1e3\") " pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:02 crc kubenswrapper[5017]: I0122 14:09:02.112198 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/541f3c17-7361-4c0d-ac89-9a86926fd1e3-bound-sa-token\") pod \"image-registry-66df7c8f76-rl6fc\" (UID: \"541f3c17-7361-4c0d-ac89-9a86926fd1e3\") " pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:02 crc kubenswrapper[5017]: I0122 14:09:02.177430 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:02 crc kubenswrapper[5017]: I0122 14:09:02.448432 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-rl6fc"] Jan 22 14:09:03 crc kubenswrapper[5017]: I0122 14:09:03.432179 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" event={"ID":"541f3c17-7361-4c0d-ac89-9a86926fd1e3","Type":"ContainerStarted","Data":"9ee9731cb341ffa993c80914ede0ca2a701548fbe87790d1ecb2421af7363483"} Jan 22 14:09:03 crc kubenswrapper[5017]: I0122 14:09:03.432515 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:03 crc kubenswrapper[5017]: I0122 14:09:03.432526 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" event={"ID":"541f3c17-7361-4c0d-ac89-9a86926fd1e3","Type":"ContainerStarted","Data":"baf631aa70fb8d1bead4c0c464950cb29d55372f14198a25c1575e0d9c3476e1"} Jan 22 14:09:03 crc kubenswrapper[5017]: I0122 14:09:03.453194 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" podStartSLOduration=2.453175768 podStartE2EDuration="2.453175768s" podCreationTimestamp="2026-01-22 14:09:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:09:03.448142854 +0000 UTC m=+358.440604712" watchObservedRunningTime="2026-01-22 14:09:03.453175768 +0000 UTC m=+358.445637626" Jan 22 14:09:22 crc kubenswrapper[5017]: I0122 14:09:22.185168 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-rl6fc" Jan 22 14:09:22 crc kubenswrapper[5017]: I0122 14:09:22.242543 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tn54n"] Jan 22 14:09:22 crc kubenswrapper[5017]: I0122 14:09:22.883825 5017 patch_prober.go:28] interesting pod/machine-config-daemon-cr62h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:09:22 crc kubenswrapper[5017]: I0122 14:09:22.884203 5017 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.192997 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jkvz7"] Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.193336 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jkvz7" podUID="51b300c4-69d8-49d7-a5e4-ec3054ceed30" containerName="registry-server" containerID="cri-o://486d6448ffbafc9847e83046b48670632ced71cf9734b4254b322c19b18ebf25" gracePeriod=30 Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.209362 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/community-operators-v9nd6"] Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.209672 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-v9nd6" podUID="a773a1c5-5756-429a-950d-7e8b8c95341f" containerName="registry-server" containerID="cri-o://a2100c38a29c0b77cf8dda8d3ee36c76953299ba4465a2da718bd791e957d18f" gracePeriod=30 Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.219739 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7h4hj"] Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.220022 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" podUID="faf7a532-753d-4862-812c-3ef174c75f81" containerName="marketplace-operator" containerID="cri-o://f30defb7d26ec58b03741527c15da877669edfaba7ffef57fdcdc7a92eea9529" gracePeriod=30 Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.230234 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-h9dv2"] Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.230472 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-h9dv2" podUID="c820f9cc-89d7-40d4-9113-8ac2982a3bbf" containerName="registry-server" containerID="cri-o://25c21c0031440a32caa61981641979d52fbb41b624274cd02e47d8546cbff1b1" gracePeriod=30 Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.239319 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5fttm"] Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.240365 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5fttm" Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.244510 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xhq9v"] Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.244732 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xhq9v" podUID="194b80b4-3a8d-4530-8581-53cc45997936" containerName="registry-server" containerID="cri-o://95dc172f8a13fbbe2eaad709a5d16d418f917d5c7375e7e6d5b4287ae404f5b2" gracePeriod=30 Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.251269 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5fttm"] Jan 22 14:09:23 crc kubenswrapper[5017]: E0122 14:09:23.323968 5017 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 95dc172f8a13fbbe2eaad709a5d16d418f917d5c7375e7e6d5b4287ae404f5b2 is running failed: container process not found" containerID="95dc172f8a13fbbe2eaad709a5d16d418f917d5c7375e7e6d5b4287ae404f5b2" cmd=["grpc_health_probe","-addr=:50051"] Jan 22 14:09:23 crc kubenswrapper[5017]: E0122 14:09:23.326928 5017 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 95dc172f8a13fbbe2eaad709a5d16d418f917d5c7375e7e6d5b4287ae404f5b2 is running failed: container process not found" containerID="95dc172f8a13fbbe2eaad709a5d16d418f917d5c7375e7e6d5b4287ae404f5b2" cmd=["grpc_health_probe","-addr=:50051"] Jan 22 14:09:23 crc kubenswrapper[5017]: E0122 14:09:23.334214 5017 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 95dc172f8a13fbbe2eaad709a5d16d418f917d5c7375e7e6d5b4287ae404f5b2 is running failed: container process not found" containerID="95dc172f8a13fbbe2eaad709a5d16d418f917d5c7375e7e6d5b4287ae404f5b2" cmd=["grpc_health_probe","-addr=:50051"] Jan 22 14:09:23 crc kubenswrapper[5017]: E0122 14:09:23.334276 5017 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 95dc172f8a13fbbe2eaad709a5d16d418f917d5c7375e7e6d5b4287ae404f5b2 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-xhq9v" podUID="194b80b4-3a8d-4530-8581-53cc45997936" containerName="registry-server" Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.398012 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/63cce9b3-3f85-4447-8f12-77f4b35c9d6f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5fttm\" (UID: \"63cce9b3-3f85-4447-8f12-77f4b35c9d6f\") " pod="openshift-marketplace/marketplace-operator-79b997595-5fttm" Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.398345 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/63cce9b3-3f85-4447-8f12-77f4b35c9d6f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5fttm\" (UID: \"63cce9b3-3f85-4447-8f12-77f4b35c9d6f\") " pod="openshift-marketplace/marketplace-operator-79b997595-5fttm" Jan 22 14:09:23 crc 
kubenswrapper[5017]: I0122 14:09:23.398489 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvsq7\" (UniqueName: \"kubernetes.io/projected/63cce9b3-3f85-4447-8f12-77f4b35c9d6f-kube-api-access-lvsq7\") pod \"marketplace-operator-79b997595-5fttm\" (UID: \"63cce9b3-3f85-4447-8f12-77f4b35c9d6f\") " pod="openshift-marketplace/marketplace-operator-79b997595-5fttm" Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.499450 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvsq7\" (UniqueName: \"kubernetes.io/projected/63cce9b3-3f85-4447-8f12-77f4b35c9d6f-kube-api-access-lvsq7\") pod \"marketplace-operator-79b997595-5fttm\" (UID: \"63cce9b3-3f85-4447-8f12-77f4b35c9d6f\") " pod="openshift-marketplace/marketplace-operator-79b997595-5fttm" Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.499536 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/63cce9b3-3f85-4447-8f12-77f4b35c9d6f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5fttm\" (UID: \"63cce9b3-3f85-4447-8f12-77f4b35c9d6f\") " pod="openshift-marketplace/marketplace-operator-79b997595-5fttm" Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.499559 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/63cce9b3-3f85-4447-8f12-77f4b35c9d6f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5fttm\" (UID: \"63cce9b3-3f85-4447-8f12-77f4b35c9d6f\") " pod="openshift-marketplace/marketplace-operator-79b997595-5fttm" Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.500845 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/63cce9b3-3f85-4447-8f12-77f4b35c9d6f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5fttm\" (UID: \"63cce9b3-3f85-4447-8f12-77f4b35c9d6f\") " pod="openshift-marketplace/marketplace-operator-79b997595-5fttm" Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.510249 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/63cce9b3-3f85-4447-8f12-77f4b35c9d6f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5fttm\" (UID: \"63cce9b3-3f85-4447-8f12-77f4b35c9d6f\") " pod="openshift-marketplace/marketplace-operator-79b997595-5fttm" Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.513934 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvsq7\" (UniqueName: \"kubernetes.io/projected/63cce9b3-3f85-4447-8f12-77f4b35c9d6f-kube-api-access-lvsq7\") pod \"marketplace-operator-79b997595-5fttm\" (UID: \"63cce9b3-3f85-4447-8f12-77f4b35c9d6f\") " pod="openshift-marketplace/marketplace-operator-79b997595-5fttm" Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.564051 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5fttm" Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.785846 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5fttm"] Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.880925 5017 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-7h4hj container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/healthz\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Jan 22 14:09:23 crc kubenswrapper[5017]: I0122 14:09:23.881044 5017 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" podUID="faf7a532-753d-4862-812c-3ef174c75f81" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.18:8080/healthz\": dial tcp 10.217.0.18:8080: connect: connection refused" Jan 22 14:09:23 crc kubenswrapper[5017]: W0122 14:09:23.896830 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod63cce9b3_3f85_4447_8f12_77f4b35c9d6f.slice/crio-9216985399abbd2dd272f5e635c35c6c58452aa4a6cf090286ec51fba68b4dd4 WatchSource:0}: Error finding container 9216985399abbd2dd272f5e635c35c6c58452aa4a6cf090286ec51fba68b4dd4: Status 404 returned error can't find the container with id 9216985399abbd2dd272f5e635c35c6c58452aa4a6cf090286ec51fba68b4dd4 Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.165319 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jkvz7" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.240782 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.246236 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h9dv2" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.256608 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v9nd6" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.309271 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4qws\" (UniqueName: \"kubernetes.io/projected/51b300c4-69d8-49d7-a5e4-ec3054ceed30-kube-api-access-c4qws\") pod \"51b300c4-69d8-49d7-a5e4-ec3054ceed30\" (UID: \"51b300c4-69d8-49d7-a5e4-ec3054ceed30\") " Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.309419 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51b300c4-69d8-49d7-a5e4-ec3054ceed30-utilities\") pod \"51b300c4-69d8-49d7-a5e4-ec3054ceed30\" (UID: \"51b300c4-69d8-49d7-a5e4-ec3054ceed30\") " Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.309610 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51b300c4-69d8-49d7-a5e4-ec3054ceed30-catalog-content\") pod \"51b300c4-69d8-49d7-a5e4-ec3054ceed30\" (UID: \"51b300c4-69d8-49d7-a5e4-ec3054ceed30\") " Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.310985 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51b300c4-69d8-49d7-a5e4-ec3054ceed30-utilities" (OuterVolumeSpecName: "utilities") pod "51b300c4-69d8-49d7-a5e4-ec3054ceed30" (UID: "51b300c4-69d8-49d7-a5e4-ec3054ceed30"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.315971 5017 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51b300c4-69d8-49d7-a5e4-ec3054ceed30-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.317271 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51b300c4-69d8-49d7-a5e4-ec3054ceed30-kube-api-access-c4qws" (OuterVolumeSpecName: "kube-api-access-c4qws") pod "51b300c4-69d8-49d7-a5e4-ec3054ceed30" (UID: "51b300c4-69d8-49d7-a5e4-ec3054ceed30"). InnerVolumeSpecName "kube-api-access-c4qws". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.372191 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51b300c4-69d8-49d7-a5e4-ec3054ceed30-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "51b300c4-69d8-49d7-a5e4-ec3054ceed30" (UID: "51b300c4-69d8-49d7-a5e4-ec3054ceed30"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.416794 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/faf7a532-753d-4862-812c-3ef174c75f81-marketplace-operator-metrics\") pod \"faf7a532-753d-4862-812c-3ef174c75f81\" (UID: \"faf7a532-753d-4862-812c-3ef174c75f81\") " Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.416872 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a773a1c5-5756-429a-950d-7e8b8c95341f-catalog-content\") pod \"a773a1c5-5756-429a-950d-7e8b8c95341f\" (UID: \"a773a1c5-5756-429a-950d-7e8b8c95341f\") " Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.416919 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m9nz6\" (UniqueName: \"kubernetes.io/projected/a773a1c5-5756-429a-950d-7e8b8c95341f-kube-api-access-m9nz6\") pod \"a773a1c5-5756-429a-950d-7e8b8c95341f\" (UID: \"a773a1c5-5756-429a-950d-7e8b8c95341f\") " Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.416967 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/faf7a532-753d-4862-812c-3ef174c75f81-marketplace-trusted-ca\") pod \"faf7a532-753d-4862-812c-3ef174c75f81\" (UID: \"faf7a532-753d-4862-812c-3ef174c75f81\") " Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.416989 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a773a1c5-5756-429a-950d-7e8b8c95341f-utilities\") pod \"a773a1c5-5756-429a-950d-7e8b8c95341f\" (UID: \"a773a1c5-5756-429a-950d-7e8b8c95341f\") " Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.417028 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m2zz9\" (UniqueName: \"kubernetes.io/projected/c820f9cc-89d7-40d4-9113-8ac2982a3bbf-kube-api-access-m2zz9\") pod \"c820f9cc-89d7-40d4-9113-8ac2982a3bbf\" (UID: \"c820f9cc-89d7-40d4-9113-8ac2982a3bbf\") " Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.417053 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c820f9cc-89d7-40d4-9113-8ac2982a3bbf-catalog-content\") pod \"c820f9cc-89d7-40d4-9113-8ac2982a3bbf\" (UID: \"c820f9cc-89d7-40d4-9113-8ac2982a3bbf\") " Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.417137 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c820f9cc-89d7-40d4-9113-8ac2982a3bbf-utilities\") pod \"c820f9cc-89d7-40d4-9113-8ac2982a3bbf\" (UID: \"c820f9cc-89d7-40d4-9113-8ac2982a3bbf\") " Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.417174 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fm8tf\" (UniqueName: \"kubernetes.io/projected/faf7a532-753d-4862-812c-3ef174c75f81-kube-api-access-fm8tf\") pod \"faf7a532-753d-4862-812c-3ef174c75f81\" (UID: \"faf7a532-753d-4862-812c-3ef174c75f81\") " Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.418966 5017 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51b300c4-69d8-49d7-a5e4-ec3054ceed30-catalog-content\") on node 
\"crc\" DevicePath \"\"" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.418993 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4qws\" (UniqueName: \"kubernetes.io/projected/51b300c4-69d8-49d7-a5e4-ec3054ceed30-kube-api-access-c4qws\") on node \"crc\" DevicePath \"\"" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.419879 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/faf7a532-753d-4862-812c-3ef174c75f81-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "faf7a532-753d-4862-812c-3ef174c75f81" (UID: "faf7a532-753d-4862-812c-3ef174c75f81"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.420027 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a773a1c5-5756-429a-950d-7e8b8c95341f-utilities" (OuterVolumeSpecName: "utilities") pod "a773a1c5-5756-429a-950d-7e8b8c95341f" (UID: "a773a1c5-5756-429a-950d-7e8b8c95341f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.420868 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c820f9cc-89d7-40d4-9113-8ac2982a3bbf-utilities" (OuterVolumeSpecName: "utilities") pod "c820f9cc-89d7-40d4-9113-8ac2982a3bbf" (UID: "c820f9cc-89d7-40d4-9113-8ac2982a3bbf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.421847 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/faf7a532-753d-4862-812c-3ef174c75f81-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "faf7a532-753d-4862-812c-3ef174c75f81" (UID: "faf7a532-753d-4862-812c-3ef174c75f81"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.421967 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c820f9cc-89d7-40d4-9113-8ac2982a3bbf-kube-api-access-m2zz9" (OuterVolumeSpecName: "kube-api-access-m2zz9") pod "c820f9cc-89d7-40d4-9113-8ac2982a3bbf" (UID: "c820f9cc-89d7-40d4-9113-8ac2982a3bbf"). InnerVolumeSpecName "kube-api-access-m2zz9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.423487 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/faf7a532-753d-4862-812c-3ef174c75f81-kube-api-access-fm8tf" (OuterVolumeSpecName: "kube-api-access-fm8tf") pod "faf7a532-753d-4862-812c-3ef174c75f81" (UID: "faf7a532-753d-4862-812c-3ef174c75f81"). InnerVolumeSpecName "kube-api-access-fm8tf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.423746 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a773a1c5-5756-429a-950d-7e8b8c95341f-kube-api-access-m9nz6" (OuterVolumeSpecName: "kube-api-access-m9nz6") pod "a773a1c5-5756-429a-950d-7e8b8c95341f" (UID: "a773a1c5-5756-429a-950d-7e8b8c95341f"). InnerVolumeSpecName "kube-api-access-m9nz6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.446042 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c820f9cc-89d7-40d4-9113-8ac2982a3bbf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c820f9cc-89d7-40d4-9113-8ac2982a3bbf" (UID: "c820f9cc-89d7-40d4-9113-8ac2982a3bbf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.485514 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a773a1c5-5756-429a-950d-7e8b8c95341f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a773a1c5-5756-429a-950d-7e8b8c95341f" (UID: "a773a1c5-5756-429a-950d-7e8b8c95341f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.524352 5017 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a773a1c5-5756-429a-950d-7e8b8c95341f-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.524394 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m2zz9\" (UniqueName: \"kubernetes.io/projected/c820f9cc-89d7-40d4-9113-8ac2982a3bbf-kube-api-access-m2zz9\") on node \"crc\" DevicePath \"\"" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.524410 5017 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c820f9cc-89d7-40d4-9113-8ac2982a3bbf-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.524419 5017 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c820f9cc-89d7-40d4-9113-8ac2982a3bbf-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.524442 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fm8tf\" (UniqueName: \"kubernetes.io/projected/faf7a532-753d-4862-812c-3ef174c75f81-kube-api-access-fm8tf\") on node \"crc\" DevicePath \"\"" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.524452 5017 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/faf7a532-753d-4862-812c-3ef174c75f81-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.524461 5017 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a773a1c5-5756-429a-950d-7e8b8c95341f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.524470 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9nz6\" (UniqueName: \"kubernetes.io/projected/a773a1c5-5756-429a-950d-7e8b8c95341f-kube-api-access-m9nz6\") on node \"crc\" DevicePath \"\"" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.524482 5017 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/faf7a532-753d-4862-812c-3ef174c75f81-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.553318 5017 generic.go:334] "Generic (PLEG): container finished" 
podID="194b80b4-3a8d-4530-8581-53cc45997936" containerID="95dc172f8a13fbbe2eaad709a5d16d418f917d5c7375e7e6d5b4287ae404f5b2" exitCode=0 Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.553377 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xhq9v" event={"ID":"194b80b4-3a8d-4530-8581-53cc45997936","Type":"ContainerDied","Data":"95dc172f8a13fbbe2eaad709a5d16d418f917d5c7375e7e6d5b4287ae404f5b2"} Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.556296 5017 generic.go:334] "Generic (PLEG): container finished" podID="c820f9cc-89d7-40d4-9113-8ac2982a3bbf" containerID="25c21c0031440a32caa61981641979d52fbb41b624274cd02e47d8546cbff1b1" exitCode=0 Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.556358 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h9dv2" event={"ID":"c820f9cc-89d7-40d4-9113-8ac2982a3bbf","Type":"ContainerDied","Data":"25c21c0031440a32caa61981641979d52fbb41b624274cd02e47d8546cbff1b1"} Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.556373 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h9dv2" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.556393 5017 scope.go:117] "RemoveContainer" containerID="25c21c0031440a32caa61981641979d52fbb41b624274cd02e47d8546cbff1b1" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.556382 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h9dv2" event={"ID":"c820f9cc-89d7-40d4-9113-8ac2982a3bbf","Type":"ContainerDied","Data":"f3974050ab0e4e907f9b9a4afe75e261c605113a39e65ce433b459d8db5bc074"} Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.558588 5017 generic.go:334] "Generic (PLEG): container finished" podID="faf7a532-753d-4862-812c-3ef174c75f81" containerID="f30defb7d26ec58b03741527c15da877669edfaba7ffef57fdcdc7a92eea9529" exitCode=0 Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.558644 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.558640 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" event={"ID":"faf7a532-753d-4862-812c-3ef174c75f81","Type":"ContainerDied","Data":"f30defb7d26ec58b03741527c15da877669edfaba7ffef57fdcdc7a92eea9529"} Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.558698 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7h4hj" event={"ID":"faf7a532-753d-4862-812c-3ef174c75f81","Type":"ContainerDied","Data":"fbc8a82c622ea9d02a9b90152d12d1a97189b0f71ae2d002919b398f60d535fc"} Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.561379 5017 generic.go:334] "Generic (PLEG): container finished" podID="a773a1c5-5756-429a-950d-7e8b8c95341f" containerID="a2100c38a29c0b77cf8dda8d3ee36c76953299ba4465a2da718bd791e957d18f" exitCode=0 Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.561446 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v9nd6" event={"ID":"a773a1c5-5756-429a-950d-7e8b8c95341f","Type":"ContainerDied","Data":"a2100c38a29c0b77cf8dda8d3ee36c76953299ba4465a2da718bd791e957d18f"} Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.561493 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v9nd6" event={"ID":"a773a1c5-5756-429a-950d-7e8b8c95341f","Type":"ContainerDied","Data":"e52e1ddf8b3781e44a5eab79a10553800097c43eaa8dc60e12fe86f4cea3a816"} Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.561457 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v9nd6" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.573327 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5fttm" event={"ID":"63cce9b3-3f85-4447-8f12-77f4b35c9d6f","Type":"ContainerStarted","Data":"d1caa587ad25db1f9045704523114499af7fbd981e4bd8fdf2348cfcbedd7850"} Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.573378 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5fttm" event={"ID":"63cce9b3-3f85-4447-8f12-77f4b35c9d6f","Type":"ContainerStarted","Data":"9216985399abbd2dd272f5e635c35c6c58452aa4a6cf090286ec51fba68b4dd4"} Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.576560 5017 scope.go:117] "RemoveContainer" containerID="e1423d241a134f90e100a3ae77a80ea1bbd4a095825be78c44174016f504b482" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.577813 5017 generic.go:334] "Generic (PLEG): container finished" podID="51b300c4-69d8-49d7-a5e4-ec3054ceed30" containerID="486d6448ffbafc9847e83046b48670632ced71cf9734b4254b322c19b18ebf25" exitCode=0 Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.577856 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkvz7" event={"ID":"51b300c4-69d8-49d7-a5e4-ec3054ceed30","Type":"ContainerDied","Data":"486d6448ffbafc9847e83046b48670632ced71cf9734b4254b322c19b18ebf25"} Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.577889 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkvz7" 
event={"ID":"51b300c4-69d8-49d7-a5e4-ec3054ceed30","Type":"ContainerDied","Data":"966d05eb7ecb97edbdcdc11a32e3e6500e84256f1ee612ddf7a6e836fefc1f51"} Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.578002 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jkvz7" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.597683 5017 scope.go:117] "RemoveContainer" containerID="9e1b0c64fc16c79d3186b4b20a3e5a5edd2a6a67a2d4f7a28bdd525bd89f0bd4" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.640553 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-h9dv2"] Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.644320 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-h9dv2"] Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.647652 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v9nd6"] Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.653238 5017 scope.go:117] "RemoveContainer" containerID="25c21c0031440a32caa61981641979d52fbb41b624274cd02e47d8546cbff1b1" Jan 22 14:09:24 crc kubenswrapper[5017]: E0122 14:09:24.655630 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25c21c0031440a32caa61981641979d52fbb41b624274cd02e47d8546cbff1b1\": container with ID starting with 25c21c0031440a32caa61981641979d52fbb41b624274cd02e47d8546cbff1b1 not found: ID does not exist" containerID="25c21c0031440a32caa61981641979d52fbb41b624274cd02e47d8546cbff1b1" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.655675 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25c21c0031440a32caa61981641979d52fbb41b624274cd02e47d8546cbff1b1"} err="failed to get container status \"25c21c0031440a32caa61981641979d52fbb41b624274cd02e47d8546cbff1b1\": rpc error: code = NotFound desc = could not find container \"25c21c0031440a32caa61981641979d52fbb41b624274cd02e47d8546cbff1b1\": container with ID starting with 25c21c0031440a32caa61981641979d52fbb41b624274cd02e47d8546cbff1b1 not found: ID does not exist" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.655722 5017 scope.go:117] "RemoveContainer" containerID="e1423d241a134f90e100a3ae77a80ea1bbd4a095825be78c44174016f504b482" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.656442 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-v9nd6"] Jan 22 14:09:24 crc kubenswrapper[5017]: E0122 14:09:24.656579 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1423d241a134f90e100a3ae77a80ea1bbd4a095825be78c44174016f504b482\": container with ID starting with e1423d241a134f90e100a3ae77a80ea1bbd4a095825be78c44174016f504b482 not found: ID does not exist" containerID="e1423d241a134f90e100a3ae77a80ea1bbd4a095825be78c44174016f504b482" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.656612 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1423d241a134f90e100a3ae77a80ea1bbd4a095825be78c44174016f504b482"} err="failed to get container status \"e1423d241a134f90e100a3ae77a80ea1bbd4a095825be78c44174016f504b482\": rpc error: code = NotFound desc = could not find container 
\"e1423d241a134f90e100a3ae77a80ea1bbd4a095825be78c44174016f504b482\": container with ID starting with e1423d241a134f90e100a3ae77a80ea1bbd4a095825be78c44174016f504b482 not found: ID does not exist" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.656645 5017 scope.go:117] "RemoveContainer" containerID="9e1b0c64fc16c79d3186b4b20a3e5a5edd2a6a67a2d4f7a28bdd525bd89f0bd4" Jan 22 14:09:24 crc kubenswrapper[5017]: E0122 14:09:24.658289 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e1b0c64fc16c79d3186b4b20a3e5a5edd2a6a67a2d4f7a28bdd525bd89f0bd4\": container with ID starting with 9e1b0c64fc16c79d3186b4b20a3e5a5edd2a6a67a2d4f7a28bdd525bd89f0bd4 not found: ID does not exist" containerID="9e1b0c64fc16c79d3186b4b20a3e5a5edd2a6a67a2d4f7a28bdd525bd89f0bd4" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.658318 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e1b0c64fc16c79d3186b4b20a3e5a5edd2a6a67a2d4f7a28bdd525bd89f0bd4"} err="failed to get container status \"9e1b0c64fc16c79d3186b4b20a3e5a5edd2a6a67a2d4f7a28bdd525bd89f0bd4\": rpc error: code = NotFound desc = could not find container \"9e1b0c64fc16c79d3186b4b20a3e5a5edd2a6a67a2d4f7a28bdd525bd89f0bd4\": container with ID starting with 9e1b0c64fc16c79d3186b4b20a3e5a5edd2a6a67a2d4f7a28bdd525bd89f0bd4 not found: ID does not exist" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.658337 5017 scope.go:117] "RemoveContainer" containerID="f30defb7d26ec58b03741527c15da877669edfaba7ffef57fdcdc7a92eea9529" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.663435 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jkvz7"] Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.666386 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jkvz7"] Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.673781 5017 scope.go:117] "RemoveContainer" containerID="f6da17dc2842694a94757d84e60b90264ef8ccdbc285d97eadd9687b3ff219b9" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.674605 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7h4hj"] Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.679737 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7h4hj"] Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.690942 5017 scope.go:117] "RemoveContainer" containerID="f30defb7d26ec58b03741527c15da877669edfaba7ffef57fdcdc7a92eea9529" Jan 22 14:09:24 crc kubenswrapper[5017]: E0122 14:09:24.694196 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f30defb7d26ec58b03741527c15da877669edfaba7ffef57fdcdc7a92eea9529\": container with ID starting with f30defb7d26ec58b03741527c15da877669edfaba7ffef57fdcdc7a92eea9529 not found: ID does not exist" containerID="f30defb7d26ec58b03741527c15da877669edfaba7ffef57fdcdc7a92eea9529" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.694255 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f30defb7d26ec58b03741527c15da877669edfaba7ffef57fdcdc7a92eea9529"} err="failed to get container status \"f30defb7d26ec58b03741527c15da877669edfaba7ffef57fdcdc7a92eea9529\": rpc error: code = NotFound desc = could not find container 
\"f30defb7d26ec58b03741527c15da877669edfaba7ffef57fdcdc7a92eea9529\": container with ID starting with f30defb7d26ec58b03741527c15da877669edfaba7ffef57fdcdc7a92eea9529 not found: ID does not exist" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.694286 5017 scope.go:117] "RemoveContainer" containerID="f6da17dc2842694a94757d84e60b90264ef8ccdbc285d97eadd9687b3ff219b9" Jan 22 14:09:24 crc kubenswrapper[5017]: E0122 14:09:24.694754 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6da17dc2842694a94757d84e60b90264ef8ccdbc285d97eadd9687b3ff219b9\": container with ID starting with f6da17dc2842694a94757d84e60b90264ef8ccdbc285d97eadd9687b3ff219b9 not found: ID does not exist" containerID="f6da17dc2842694a94757d84e60b90264ef8ccdbc285d97eadd9687b3ff219b9" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.694810 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6da17dc2842694a94757d84e60b90264ef8ccdbc285d97eadd9687b3ff219b9"} err="failed to get container status \"f6da17dc2842694a94757d84e60b90264ef8ccdbc285d97eadd9687b3ff219b9\": rpc error: code = NotFound desc = could not find container \"f6da17dc2842694a94757d84e60b90264ef8ccdbc285d97eadd9687b3ff219b9\": container with ID starting with f6da17dc2842694a94757d84e60b90264ef8ccdbc285d97eadd9687b3ff219b9 not found: ID does not exist" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.694839 5017 scope.go:117] "RemoveContainer" containerID="a2100c38a29c0b77cf8dda8d3ee36c76953299ba4465a2da718bd791e957d18f" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.706773 5017 scope.go:117] "RemoveContainer" containerID="9c29fd852629b3fee495709faf68bab98393198a2d1ec7c97a4a489f8051ad99" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.727616 5017 scope.go:117] "RemoveContainer" containerID="feb9ff9e2ec7f30b30f36847e06c489347dad533a0fbb0045688cb66a0e9561c" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.744389 5017 scope.go:117] "RemoveContainer" containerID="a2100c38a29c0b77cf8dda8d3ee36c76953299ba4465a2da718bd791e957d18f" Jan 22 14:09:24 crc kubenswrapper[5017]: E0122 14:09:24.745333 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2100c38a29c0b77cf8dda8d3ee36c76953299ba4465a2da718bd791e957d18f\": container with ID starting with a2100c38a29c0b77cf8dda8d3ee36c76953299ba4465a2da718bd791e957d18f not found: ID does not exist" containerID="a2100c38a29c0b77cf8dda8d3ee36c76953299ba4465a2da718bd791e957d18f" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.745380 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2100c38a29c0b77cf8dda8d3ee36c76953299ba4465a2da718bd791e957d18f"} err="failed to get container status \"a2100c38a29c0b77cf8dda8d3ee36c76953299ba4465a2da718bd791e957d18f\": rpc error: code = NotFound desc = could not find container \"a2100c38a29c0b77cf8dda8d3ee36c76953299ba4465a2da718bd791e957d18f\": container with ID starting with a2100c38a29c0b77cf8dda8d3ee36c76953299ba4465a2da718bd791e957d18f not found: ID does not exist" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.745416 5017 scope.go:117] "RemoveContainer" containerID="9c29fd852629b3fee495709faf68bab98393198a2d1ec7c97a4a489f8051ad99" Jan 22 14:09:24 crc kubenswrapper[5017]: E0122 14:09:24.746311 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
could not find container \"9c29fd852629b3fee495709faf68bab98393198a2d1ec7c97a4a489f8051ad99\": container with ID starting with 9c29fd852629b3fee495709faf68bab98393198a2d1ec7c97a4a489f8051ad99 not found: ID does not exist" containerID="9c29fd852629b3fee495709faf68bab98393198a2d1ec7c97a4a489f8051ad99" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.746393 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c29fd852629b3fee495709faf68bab98393198a2d1ec7c97a4a489f8051ad99"} err="failed to get container status \"9c29fd852629b3fee495709faf68bab98393198a2d1ec7c97a4a489f8051ad99\": rpc error: code = NotFound desc = could not find container \"9c29fd852629b3fee495709faf68bab98393198a2d1ec7c97a4a489f8051ad99\": container with ID starting with 9c29fd852629b3fee495709faf68bab98393198a2d1ec7c97a4a489f8051ad99 not found: ID does not exist" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.746461 5017 scope.go:117] "RemoveContainer" containerID="feb9ff9e2ec7f30b30f36847e06c489347dad533a0fbb0045688cb66a0e9561c" Jan 22 14:09:24 crc kubenswrapper[5017]: E0122 14:09:24.746941 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"feb9ff9e2ec7f30b30f36847e06c489347dad533a0fbb0045688cb66a0e9561c\": container with ID starting with feb9ff9e2ec7f30b30f36847e06c489347dad533a0fbb0045688cb66a0e9561c not found: ID does not exist" containerID="feb9ff9e2ec7f30b30f36847e06c489347dad533a0fbb0045688cb66a0e9561c" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.746982 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"feb9ff9e2ec7f30b30f36847e06c489347dad533a0fbb0045688cb66a0e9561c"} err="failed to get container status \"feb9ff9e2ec7f30b30f36847e06c489347dad533a0fbb0045688cb66a0e9561c\": rpc error: code = NotFound desc = could not find container \"feb9ff9e2ec7f30b30f36847e06c489347dad533a0fbb0045688cb66a0e9561c\": container with ID starting with feb9ff9e2ec7f30b30f36847e06c489347dad533a0fbb0045688cb66a0e9561c not found: ID does not exist" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.747011 5017 scope.go:117] "RemoveContainer" containerID="486d6448ffbafc9847e83046b48670632ced71cf9734b4254b322c19b18ebf25" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.761551 5017 scope.go:117] "RemoveContainer" containerID="9bb14a85d9c57c3c68305746f33eb6063460f04dd8567e8d37354718e6c87b29" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.778042 5017 scope.go:117] "RemoveContainer" containerID="feb3a924e1d1e8ee9e98f9d309a03d841290d7d93fac6dca0f56fa6b3814b55d" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.803304 5017 scope.go:117] "RemoveContainer" containerID="486d6448ffbafc9847e83046b48670632ced71cf9734b4254b322c19b18ebf25" Jan 22 14:09:24 crc kubenswrapper[5017]: E0122 14:09:24.804002 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"486d6448ffbafc9847e83046b48670632ced71cf9734b4254b322c19b18ebf25\": container with ID starting with 486d6448ffbafc9847e83046b48670632ced71cf9734b4254b322c19b18ebf25 not found: ID does not exist" containerID="486d6448ffbafc9847e83046b48670632ced71cf9734b4254b322c19b18ebf25" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.804054 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"486d6448ffbafc9847e83046b48670632ced71cf9734b4254b322c19b18ebf25"} err="failed 
to get container status \"486d6448ffbafc9847e83046b48670632ced71cf9734b4254b322c19b18ebf25\": rpc error: code = NotFound desc = could not find container \"486d6448ffbafc9847e83046b48670632ced71cf9734b4254b322c19b18ebf25\": container with ID starting with 486d6448ffbafc9847e83046b48670632ced71cf9734b4254b322c19b18ebf25 not found: ID does not exist" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.804080 5017 scope.go:117] "RemoveContainer" containerID="9bb14a85d9c57c3c68305746f33eb6063460f04dd8567e8d37354718e6c87b29" Jan 22 14:09:24 crc kubenswrapper[5017]: E0122 14:09:24.804650 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9bb14a85d9c57c3c68305746f33eb6063460f04dd8567e8d37354718e6c87b29\": container with ID starting with 9bb14a85d9c57c3c68305746f33eb6063460f04dd8567e8d37354718e6c87b29 not found: ID does not exist" containerID="9bb14a85d9c57c3c68305746f33eb6063460f04dd8567e8d37354718e6c87b29" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.804709 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9bb14a85d9c57c3c68305746f33eb6063460f04dd8567e8d37354718e6c87b29"} err="failed to get container status \"9bb14a85d9c57c3c68305746f33eb6063460f04dd8567e8d37354718e6c87b29\": rpc error: code = NotFound desc = could not find container \"9bb14a85d9c57c3c68305746f33eb6063460f04dd8567e8d37354718e6c87b29\": container with ID starting with 9bb14a85d9c57c3c68305746f33eb6063460f04dd8567e8d37354718e6c87b29 not found: ID does not exist" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.804753 5017 scope.go:117] "RemoveContainer" containerID="feb3a924e1d1e8ee9e98f9d309a03d841290d7d93fac6dca0f56fa6b3814b55d" Jan 22 14:09:24 crc kubenswrapper[5017]: E0122 14:09:24.805168 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"feb3a924e1d1e8ee9e98f9d309a03d841290d7d93fac6dca0f56fa6b3814b55d\": container with ID starting with feb3a924e1d1e8ee9e98f9d309a03d841290d7d93fac6dca0f56fa6b3814b55d not found: ID does not exist" containerID="feb3a924e1d1e8ee9e98f9d309a03d841290d7d93fac6dca0f56fa6b3814b55d" Jan 22 14:09:24 crc kubenswrapper[5017]: I0122 14:09:24.805218 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"feb3a924e1d1e8ee9e98f9d309a03d841290d7d93fac6dca0f56fa6b3814b55d"} err="failed to get container status \"feb3a924e1d1e8ee9e98f9d309a03d841290d7d93fac6dca0f56fa6b3814b55d\": rpc error: code = NotFound desc = could not find container \"feb3a924e1d1e8ee9e98f9d309a03d841290d7d93fac6dca0f56fa6b3814b55d\": container with ID starting with feb3a924e1d1e8ee9e98f9d309a03d841290d7d93fac6dca0f56fa6b3814b55d not found: ID does not exist" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.202148 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8jlwk"] Jan 22 14:09:25 crc kubenswrapper[5017]: E0122 14:09:25.202386 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51b300c4-69d8-49d7-a5e4-ec3054ceed30" containerName="registry-server" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.202400 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="51b300c4-69d8-49d7-a5e4-ec3054ceed30" containerName="registry-server" Jan 22 14:09:25 crc kubenswrapper[5017]: E0122 14:09:25.202411 5017 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="51b300c4-69d8-49d7-a5e4-ec3054ceed30" containerName="extract-content" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.202418 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="51b300c4-69d8-49d7-a5e4-ec3054ceed30" containerName="extract-content" Jan 22 14:09:25 crc kubenswrapper[5017]: E0122 14:09:25.202427 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a773a1c5-5756-429a-950d-7e8b8c95341f" containerName="extract-utilities" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.202435 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="a773a1c5-5756-429a-950d-7e8b8c95341f" containerName="extract-utilities" Jan 22 14:09:25 crc kubenswrapper[5017]: E0122 14:09:25.202446 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c820f9cc-89d7-40d4-9113-8ac2982a3bbf" containerName="extract-content" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.202455 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="c820f9cc-89d7-40d4-9113-8ac2982a3bbf" containerName="extract-content" Jan 22 14:09:25 crc kubenswrapper[5017]: E0122 14:09:25.202467 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faf7a532-753d-4862-812c-3ef174c75f81" containerName="marketplace-operator" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.202474 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="faf7a532-753d-4862-812c-3ef174c75f81" containerName="marketplace-operator" Jan 22 14:09:25 crc kubenswrapper[5017]: E0122 14:09:25.202485 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a773a1c5-5756-429a-950d-7e8b8c95341f" containerName="extract-content" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.202494 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="a773a1c5-5756-429a-950d-7e8b8c95341f" containerName="extract-content" Jan 22 14:09:25 crc kubenswrapper[5017]: E0122 14:09:25.202501 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c820f9cc-89d7-40d4-9113-8ac2982a3bbf" containerName="registry-server" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.202509 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="c820f9cc-89d7-40d4-9113-8ac2982a3bbf" containerName="registry-server" Jan 22 14:09:25 crc kubenswrapper[5017]: E0122 14:09:25.202524 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faf7a532-753d-4862-812c-3ef174c75f81" containerName="marketplace-operator" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.202531 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="faf7a532-753d-4862-812c-3ef174c75f81" containerName="marketplace-operator" Jan 22 14:09:25 crc kubenswrapper[5017]: E0122 14:09:25.202541 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c820f9cc-89d7-40d4-9113-8ac2982a3bbf" containerName="extract-utilities" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.202547 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="c820f9cc-89d7-40d4-9113-8ac2982a3bbf" containerName="extract-utilities" Jan 22 14:09:25 crc kubenswrapper[5017]: E0122 14:09:25.202554 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51b300c4-69d8-49d7-a5e4-ec3054ceed30" containerName="extract-utilities" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.202559 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="51b300c4-69d8-49d7-a5e4-ec3054ceed30" containerName="extract-utilities" Jan 22 14:09:25 crc kubenswrapper[5017]: E0122 14:09:25.202567 5017 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a773a1c5-5756-429a-950d-7e8b8c95341f" containerName="registry-server" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.202573 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="a773a1c5-5756-429a-950d-7e8b8c95341f" containerName="registry-server" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.202661 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="51b300c4-69d8-49d7-a5e4-ec3054ceed30" containerName="registry-server" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.202675 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="faf7a532-753d-4862-812c-3ef174c75f81" containerName="marketplace-operator" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.202683 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="a773a1c5-5756-429a-950d-7e8b8c95341f" containerName="registry-server" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.202693 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="faf7a532-753d-4862-812c-3ef174c75f81" containerName="marketplace-operator" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.202703 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="c820f9cc-89d7-40d4-9113-8ac2982a3bbf" containerName="registry-server" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.203434 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8jlwk" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.213708 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.216879 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8jlwk"] Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.273806 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51b300c4-69d8-49d7-a5e4-ec3054ceed30" path="/var/lib/kubelet/pods/51b300c4-69d8-49d7-a5e4-ec3054ceed30/volumes" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.274734 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a773a1c5-5756-429a-950d-7e8b8c95341f" path="/var/lib/kubelet/pods/a773a1c5-5756-429a-950d-7e8b8c95341f/volumes" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.275677 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c820f9cc-89d7-40d4-9113-8ac2982a3bbf" path="/var/lib/kubelet/pods/c820f9cc-89d7-40d4-9113-8ac2982a3bbf/volumes" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.276998 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="faf7a532-753d-4862-812c-3ef174c75f81" path="/var/lib/kubelet/pods/faf7a532-753d-4862-812c-3ef174c75f81/volumes" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.334921 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a572a90-9417-4bac-a648-227d3f892664-utilities\") pod \"certified-operators-8jlwk\" (UID: \"9a572a90-9417-4bac-a648-227d3f892664\") " pod="openshift-marketplace/certified-operators-8jlwk" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.335303 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/9a572a90-9417-4bac-a648-227d3f892664-catalog-content\") pod \"certified-operators-8jlwk\" (UID: \"9a572a90-9417-4bac-a648-227d3f892664\") " pod="openshift-marketplace/certified-operators-8jlwk" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.335361 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68r99\" (UniqueName: \"kubernetes.io/projected/9a572a90-9417-4bac-a648-227d3f892664-kube-api-access-68r99\") pod \"certified-operators-8jlwk\" (UID: \"9a572a90-9417-4bac-a648-227d3f892664\") " pod="openshift-marketplace/certified-operators-8jlwk" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.436462 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a572a90-9417-4bac-a648-227d3f892664-catalog-content\") pod \"certified-operators-8jlwk\" (UID: \"9a572a90-9417-4bac-a648-227d3f892664\") " pod="openshift-marketplace/certified-operators-8jlwk" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.436524 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68r99\" (UniqueName: \"kubernetes.io/projected/9a572a90-9417-4bac-a648-227d3f892664-kube-api-access-68r99\") pod \"certified-operators-8jlwk\" (UID: \"9a572a90-9417-4bac-a648-227d3f892664\") " pod="openshift-marketplace/certified-operators-8jlwk" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.436548 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a572a90-9417-4bac-a648-227d3f892664-utilities\") pod \"certified-operators-8jlwk\" (UID: \"9a572a90-9417-4bac-a648-227d3f892664\") " pod="openshift-marketplace/certified-operators-8jlwk" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.437145 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a572a90-9417-4bac-a648-227d3f892664-catalog-content\") pod \"certified-operators-8jlwk\" (UID: \"9a572a90-9417-4bac-a648-227d3f892664\") " pod="openshift-marketplace/certified-operators-8jlwk" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.437246 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a572a90-9417-4bac-a648-227d3f892664-utilities\") pod \"certified-operators-8jlwk\" (UID: \"9a572a90-9417-4bac-a648-227d3f892664\") " pod="openshift-marketplace/certified-operators-8jlwk" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.461929 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68r99\" (UniqueName: \"kubernetes.io/projected/9a572a90-9417-4bac-a648-227d3f892664-kube-api-access-68r99\") pod \"certified-operators-8jlwk\" (UID: \"9a572a90-9417-4bac-a648-227d3f892664\") " pod="openshift-marketplace/certified-operators-8jlwk" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.522844 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8jlwk" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.594327 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-5fttm" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.598242 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-5fttm" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.630953 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-5fttm" podStartSLOduration=2.630932552 podStartE2EDuration="2.630932552s" podCreationTimestamp="2026-01-22 14:09:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:09:25.620959436 +0000 UTC m=+380.613421294" watchObservedRunningTime="2026-01-22 14:09:25.630932552 +0000 UTC m=+380.623394400" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.808575 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6hplb"] Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.809716 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6hplb" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.813461 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.815837 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6hplb"] Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.943150 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cfba557d-e1ff-4acf-b373-1ac74e4754ee-catalog-content\") pod \"redhat-marketplace-6hplb\" (UID: \"cfba557d-e1ff-4acf-b373-1ac74e4754ee\") " pod="openshift-marketplace/redhat-marketplace-6hplb" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.943220 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cfba557d-e1ff-4acf-b373-1ac74e4754ee-utilities\") pod \"redhat-marketplace-6hplb\" (UID: \"cfba557d-e1ff-4acf-b373-1ac74e4754ee\") " pod="openshift-marketplace/redhat-marketplace-6hplb" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.943245 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wswr7\" (UniqueName: \"kubernetes.io/projected/cfba557d-e1ff-4acf-b373-1ac74e4754ee-kube-api-access-wswr7\") pod \"redhat-marketplace-6hplb\" (UID: \"cfba557d-e1ff-4acf-b373-1ac74e4754ee\") " pod="openshift-marketplace/redhat-marketplace-6hplb" Jan 22 14:09:25 crc kubenswrapper[5017]: I0122 14:09:25.979471 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8jlwk"] Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.045215 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cfba557d-e1ff-4acf-b373-1ac74e4754ee-utilities\") pod \"redhat-marketplace-6hplb\" (UID: \"cfba557d-e1ff-4acf-b373-1ac74e4754ee\") " 
pod="openshift-marketplace/redhat-marketplace-6hplb" Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.045275 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wswr7\" (UniqueName: \"kubernetes.io/projected/cfba557d-e1ff-4acf-b373-1ac74e4754ee-kube-api-access-wswr7\") pod \"redhat-marketplace-6hplb\" (UID: \"cfba557d-e1ff-4acf-b373-1ac74e4754ee\") " pod="openshift-marketplace/redhat-marketplace-6hplb" Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.045363 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cfba557d-e1ff-4acf-b373-1ac74e4754ee-catalog-content\") pod \"redhat-marketplace-6hplb\" (UID: \"cfba557d-e1ff-4acf-b373-1ac74e4754ee\") " pod="openshift-marketplace/redhat-marketplace-6hplb" Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.045740 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cfba557d-e1ff-4acf-b373-1ac74e4754ee-utilities\") pod \"redhat-marketplace-6hplb\" (UID: \"cfba557d-e1ff-4acf-b373-1ac74e4754ee\") " pod="openshift-marketplace/redhat-marketplace-6hplb" Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.045819 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cfba557d-e1ff-4acf-b373-1ac74e4754ee-catalog-content\") pod \"redhat-marketplace-6hplb\" (UID: \"cfba557d-e1ff-4acf-b373-1ac74e4754ee\") " pod="openshift-marketplace/redhat-marketplace-6hplb" Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.071107 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wswr7\" (UniqueName: \"kubernetes.io/projected/cfba557d-e1ff-4acf-b373-1ac74e4754ee-kube-api-access-wswr7\") pod \"redhat-marketplace-6hplb\" (UID: \"cfba557d-e1ff-4acf-b373-1ac74e4754ee\") " pod="openshift-marketplace/redhat-marketplace-6hplb" Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.139511 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6hplb" Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.476549 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6hplb"] Jan 22 14:09:26 crc kubenswrapper[5017]: W0122 14:09:26.487623 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcfba557d_e1ff_4acf_b373_1ac74e4754ee.slice/crio-1ce6c4685f079af2afa53dc4da989c8d741bd56e298cb549dc9919ae7fb58267 WatchSource:0}: Error finding container 1ce6c4685f079af2afa53dc4da989c8d741bd56e298cb549dc9919ae7fb58267: Status 404 returned error can't find the container with id 1ce6c4685f079af2afa53dc4da989c8d741bd56e298cb549dc9919ae7fb58267 Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.494421 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xhq9v" Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.599592 5017 generic.go:334] "Generic (PLEG): container finished" podID="9a572a90-9417-4bac-a648-227d3f892664" containerID="8ffe3915cbf79c9e3b7d3b667a2c9b854eedc1af0f06f077ae40ae13c6a9746e" exitCode=0 Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.599697 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8jlwk" event={"ID":"9a572a90-9417-4bac-a648-227d3f892664","Type":"ContainerDied","Data":"8ffe3915cbf79c9e3b7d3b667a2c9b854eedc1af0f06f077ae40ae13c6a9746e"} Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.600053 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8jlwk" event={"ID":"9a572a90-9417-4bac-a648-227d3f892664","Type":"ContainerStarted","Data":"92318784a22f8b03bbcd2bb3795df344167b787a18d7880a77fd761549b81372"} Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.601173 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6hplb" event={"ID":"cfba557d-e1ff-4acf-b373-1ac74e4754ee","Type":"ContainerStarted","Data":"1ce6c4685f079af2afa53dc4da989c8d741bd56e298cb549dc9919ae7fb58267"} Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.604454 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xhq9v" Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.604815 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xhq9v" event={"ID":"194b80b4-3a8d-4530-8581-53cc45997936","Type":"ContainerDied","Data":"7bb9a2658afb67f765a657e1980fe840d5b4b7a5c9826124e5741e12681377eb"} Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.604863 5017 scope.go:117] "RemoveContainer" containerID="95dc172f8a13fbbe2eaad709a5d16d418f917d5c7375e7e6d5b4287ae404f5b2" Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.619435 5017 scope.go:117] "RemoveContainer" containerID="4825ec292c4a5718f69e2c2a3f9f585aa369e6a2759c8229315f46c7d9b27e6c" Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.639208 5017 scope.go:117] "RemoveContainer" containerID="47efa657e4829666b1fb21cd47362af62022fc56101ef604d54a0a4fe3657c90" Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.652838 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6zt5\" (UniqueName: \"kubernetes.io/projected/194b80b4-3a8d-4530-8581-53cc45997936-kube-api-access-p6zt5\") pod \"194b80b4-3a8d-4530-8581-53cc45997936\" (UID: \"194b80b4-3a8d-4530-8581-53cc45997936\") " Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.652885 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/194b80b4-3a8d-4530-8581-53cc45997936-utilities\") pod \"194b80b4-3a8d-4530-8581-53cc45997936\" (UID: \"194b80b4-3a8d-4530-8581-53cc45997936\") " Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.652988 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/194b80b4-3a8d-4530-8581-53cc45997936-catalog-content\") pod \"194b80b4-3a8d-4530-8581-53cc45997936\" (UID: \"194b80b4-3a8d-4530-8581-53cc45997936\") " Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.653896 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/empty-dir/194b80b4-3a8d-4530-8581-53cc45997936-utilities" (OuterVolumeSpecName: "utilities") pod "194b80b4-3a8d-4530-8581-53cc45997936" (UID: "194b80b4-3a8d-4530-8581-53cc45997936"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.657608 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/194b80b4-3a8d-4530-8581-53cc45997936-kube-api-access-p6zt5" (OuterVolumeSpecName: "kube-api-access-p6zt5") pod "194b80b4-3a8d-4530-8581-53cc45997936" (UID: "194b80b4-3a8d-4530-8581-53cc45997936"). InnerVolumeSpecName "kube-api-access-p6zt5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.755206 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6zt5\" (UniqueName: \"kubernetes.io/projected/194b80b4-3a8d-4530-8581-53cc45997936-kube-api-access-p6zt5\") on node \"crc\" DevicePath \"\"" Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.755241 5017 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/194b80b4-3a8d-4530-8581-53cc45997936-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.764280 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/194b80b4-3a8d-4530-8581-53cc45997936-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "194b80b4-3a8d-4530-8581-53cc45997936" (UID: "194b80b4-3a8d-4530-8581-53cc45997936"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.856057 5017 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/194b80b4-3a8d-4530-8581-53cc45997936-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.939545 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xhq9v"] Jan 22 14:09:26 crc kubenswrapper[5017]: I0122 14:09:26.945534 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xhq9v"] Jan 22 14:09:27 crc kubenswrapper[5017]: I0122 14:09:27.271264 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="194b80b4-3a8d-4530-8581-53cc45997936" path="/var/lib/kubelet/pods/194b80b4-3a8d-4530-8581-53cc45997936/volumes" Jan 22 14:09:27 crc kubenswrapper[5017]: I0122 14:09:27.606730 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wd96l"] Jan 22 14:09:27 crc kubenswrapper[5017]: E0122 14:09:27.607144 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="194b80b4-3a8d-4530-8581-53cc45997936" containerName="registry-server" Jan 22 14:09:27 crc kubenswrapper[5017]: I0122 14:09:27.607162 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="194b80b4-3a8d-4530-8581-53cc45997936" containerName="registry-server" Jan 22 14:09:27 crc kubenswrapper[5017]: E0122 14:09:27.607180 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="194b80b4-3a8d-4530-8581-53cc45997936" containerName="extract-content" Jan 22 14:09:27 crc kubenswrapper[5017]: I0122 14:09:27.607188 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="194b80b4-3a8d-4530-8581-53cc45997936" 
containerName="extract-content" Jan 22 14:09:27 crc kubenswrapper[5017]: E0122 14:09:27.607199 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="194b80b4-3a8d-4530-8581-53cc45997936" containerName="extract-utilities" Jan 22 14:09:27 crc kubenswrapper[5017]: I0122 14:09:27.607209 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="194b80b4-3a8d-4530-8581-53cc45997936" containerName="extract-utilities" Jan 22 14:09:27 crc kubenswrapper[5017]: I0122 14:09:27.607332 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="194b80b4-3a8d-4530-8581-53cc45997936" containerName="registry-server" Jan 22 14:09:27 crc kubenswrapper[5017]: I0122 14:09:27.609379 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wd96l" Jan 22 14:09:27 crc kubenswrapper[5017]: I0122 14:09:27.611796 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 22 14:09:27 crc kubenswrapper[5017]: I0122 14:09:27.618405 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wd96l"] Jan 22 14:09:27 crc kubenswrapper[5017]: I0122 14:09:27.621689 5017 generic.go:334] "Generic (PLEG): container finished" podID="cfba557d-e1ff-4acf-b373-1ac74e4754ee" containerID="6330e00f841fd470aa532de269f1dee049b75aed140d03e963ed1bf108cf8c1b" exitCode=0 Jan 22 14:09:27 crc kubenswrapper[5017]: I0122 14:09:27.621771 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6hplb" event={"ID":"cfba557d-e1ff-4acf-b373-1ac74e4754ee","Type":"ContainerDied","Data":"6330e00f841fd470aa532de269f1dee049b75aed140d03e963ed1bf108cf8c1b"} Jan 22 14:09:27 crc kubenswrapper[5017]: I0122 14:09:27.666978 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jvhx\" (UniqueName: \"kubernetes.io/projected/0742b6c5-1b21-4e65-a1f2-1f50d1751e81-kube-api-access-4jvhx\") pod \"redhat-operators-wd96l\" (UID: \"0742b6c5-1b21-4e65-a1f2-1f50d1751e81\") " pod="openshift-marketplace/redhat-operators-wd96l" Jan 22 14:09:27 crc kubenswrapper[5017]: I0122 14:09:27.667133 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0742b6c5-1b21-4e65-a1f2-1f50d1751e81-utilities\") pod \"redhat-operators-wd96l\" (UID: \"0742b6c5-1b21-4e65-a1f2-1f50d1751e81\") " pod="openshift-marketplace/redhat-operators-wd96l" Jan 22 14:09:27 crc kubenswrapper[5017]: I0122 14:09:27.667201 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0742b6c5-1b21-4e65-a1f2-1f50d1751e81-catalog-content\") pod \"redhat-operators-wd96l\" (UID: \"0742b6c5-1b21-4e65-a1f2-1f50d1751e81\") " pod="openshift-marketplace/redhat-operators-wd96l" Jan 22 14:09:27 crc kubenswrapper[5017]: I0122 14:09:27.768949 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0742b6c5-1b21-4e65-a1f2-1f50d1751e81-catalog-content\") pod \"redhat-operators-wd96l\" (UID: \"0742b6c5-1b21-4e65-a1f2-1f50d1751e81\") " pod="openshift-marketplace/redhat-operators-wd96l" Jan 22 14:09:27 crc kubenswrapper[5017]: I0122 14:09:27.769314 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jvhx\" 
(UniqueName: \"kubernetes.io/projected/0742b6c5-1b21-4e65-a1f2-1f50d1751e81-kube-api-access-4jvhx\") pod \"redhat-operators-wd96l\" (UID: \"0742b6c5-1b21-4e65-a1f2-1f50d1751e81\") " pod="openshift-marketplace/redhat-operators-wd96l" Jan 22 14:09:27 crc kubenswrapper[5017]: I0122 14:09:27.769492 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0742b6c5-1b21-4e65-a1f2-1f50d1751e81-utilities\") pod \"redhat-operators-wd96l\" (UID: \"0742b6c5-1b21-4e65-a1f2-1f50d1751e81\") " pod="openshift-marketplace/redhat-operators-wd96l" Jan 22 14:09:27 crc kubenswrapper[5017]: I0122 14:09:27.769789 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0742b6c5-1b21-4e65-a1f2-1f50d1751e81-catalog-content\") pod \"redhat-operators-wd96l\" (UID: \"0742b6c5-1b21-4e65-a1f2-1f50d1751e81\") " pod="openshift-marketplace/redhat-operators-wd96l" Jan 22 14:09:27 crc kubenswrapper[5017]: I0122 14:09:27.770389 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0742b6c5-1b21-4e65-a1f2-1f50d1751e81-utilities\") pod \"redhat-operators-wd96l\" (UID: \"0742b6c5-1b21-4e65-a1f2-1f50d1751e81\") " pod="openshift-marketplace/redhat-operators-wd96l" Jan 22 14:09:27 crc kubenswrapper[5017]: I0122 14:09:27.790428 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jvhx\" (UniqueName: \"kubernetes.io/projected/0742b6c5-1b21-4e65-a1f2-1f50d1751e81-kube-api-access-4jvhx\") pod \"redhat-operators-wd96l\" (UID: \"0742b6c5-1b21-4e65-a1f2-1f50d1751e81\") " pod="openshift-marketplace/redhat-operators-wd96l" Jan 22 14:09:27 crc kubenswrapper[5017]: I0122 14:09:27.938715 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wd96l" Jan 22 14:09:28 crc kubenswrapper[5017]: I0122 14:09:28.123429 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wd96l"] Jan 22 14:09:28 crc kubenswrapper[5017]: I0122 14:09:28.207873 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ghrs9"] Jan 22 14:09:28 crc kubenswrapper[5017]: I0122 14:09:28.209803 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ghrs9" Jan 22 14:09:28 crc kubenswrapper[5017]: I0122 14:09:28.213393 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 22 14:09:28 crc kubenswrapper[5017]: I0122 14:09:28.214855 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ghrs9"] Jan 22 14:09:28 crc kubenswrapper[5017]: I0122 14:09:28.277534 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5kzt\" (UniqueName: \"kubernetes.io/projected/24e6cfb8-c40c-4b03-873e-6d6ab15f1911-kube-api-access-x5kzt\") pod \"community-operators-ghrs9\" (UID: \"24e6cfb8-c40c-4b03-873e-6d6ab15f1911\") " pod="openshift-marketplace/community-operators-ghrs9" Jan 22 14:09:28 crc kubenswrapper[5017]: I0122 14:09:28.277604 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24e6cfb8-c40c-4b03-873e-6d6ab15f1911-utilities\") pod \"community-operators-ghrs9\" (UID: \"24e6cfb8-c40c-4b03-873e-6d6ab15f1911\") " pod="openshift-marketplace/community-operators-ghrs9" Jan 22 14:09:28 crc kubenswrapper[5017]: I0122 14:09:28.277675 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24e6cfb8-c40c-4b03-873e-6d6ab15f1911-catalog-content\") pod \"community-operators-ghrs9\" (UID: \"24e6cfb8-c40c-4b03-873e-6d6ab15f1911\") " pod="openshift-marketplace/community-operators-ghrs9" Jan 22 14:09:28 crc kubenswrapper[5017]: I0122 14:09:28.379564 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5kzt\" (UniqueName: \"kubernetes.io/projected/24e6cfb8-c40c-4b03-873e-6d6ab15f1911-kube-api-access-x5kzt\") pod \"community-operators-ghrs9\" (UID: \"24e6cfb8-c40c-4b03-873e-6d6ab15f1911\") " pod="openshift-marketplace/community-operators-ghrs9" Jan 22 14:09:28 crc kubenswrapper[5017]: I0122 14:09:28.379717 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24e6cfb8-c40c-4b03-873e-6d6ab15f1911-utilities\") pod \"community-operators-ghrs9\" (UID: \"24e6cfb8-c40c-4b03-873e-6d6ab15f1911\") " pod="openshift-marketplace/community-operators-ghrs9" Jan 22 14:09:28 crc kubenswrapper[5017]: I0122 14:09:28.379763 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24e6cfb8-c40c-4b03-873e-6d6ab15f1911-catalog-content\") pod \"community-operators-ghrs9\" (UID: \"24e6cfb8-c40c-4b03-873e-6d6ab15f1911\") " pod="openshift-marketplace/community-operators-ghrs9" Jan 22 14:09:28 crc kubenswrapper[5017]: I0122 14:09:28.380881 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24e6cfb8-c40c-4b03-873e-6d6ab15f1911-catalog-content\") pod \"community-operators-ghrs9\" (UID: \"24e6cfb8-c40c-4b03-873e-6d6ab15f1911\") " pod="openshift-marketplace/community-operators-ghrs9" Jan 22 14:09:28 crc kubenswrapper[5017]: I0122 14:09:28.380943 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24e6cfb8-c40c-4b03-873e-6d6ab15f1911-utilities\") pod \"community-operators-ghrs9\" (UID: 
\"24e6cfb8-c40c-4b03-873e-6d6ab15f1911\") " pod="openshift-marketplace/community-operators-ghrs9" Jan 22 14:09:28 crc kubenswrapper[5017]: I0122 14:09:28.400013 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5kzt\" (UniqueName: \"kubernetes.io/projected/24e6cfb8-c40c-4b03-873e-6d6ab15f1911-kube-api-access-x5kzt\") pod \"community-operators-ghrs9\" (UID: \"24e6cfb8-c40c-4b03-873e-6d6ab15f1911\") " pod="openshift-marketplace/community-operators-ghrs9" Jan 22 14:09:28 crc kubenswrapper[5017]: I0122 14:09:28.544320 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ghrs9" Jan 22 14:09:28 crc kubenswrapper[5017]: I0122 14:09:28.632943 5017 generic.go:334] "Generic (PLEG): container finished" podID="0742b6c5-1b21-4e65-a1f2-1f50d1751e81" containerID="c13248a1be4a304dd239866c8563201715074f61d062d0c946a2c4a13675c85e" exitCode=0 Jan 22 14:09:28 crc kubenswrapper[5017]: I0122 14:09:28.633022 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wd96l" event={"ID":"0742b6c5-1b21-4e65-a1f2-1f50d1751e81","Type":"ContainerDied","Data":"c13248a1be4a304dd239866c8563201715074f61d062d0c946a2c4a13675c85e"} Jan 22 14:09:28 crc kubenswrapper[5017]: I0122 14:09:28.633342 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wd96l" event={"ID":"0742b6c5-1b21-4e65-a1f2-1f50d1751e81","Type":"ContainerStarted","Data":"c89290c50d85f424993813b5f173e9dff529e9e1ff121f782f2b1988a5952ac1"} Jan 22 14:09:28 crc kubenswrapper[5017]: I0122 14:09:28.653810 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8jlwk" event={"ID":"9a572a90-9417-4bac-a648-227d3f892664","Type":"ContainerStarted","Data":"ebd0900c78f8cc84629939b2514731088cb70adcc7f55b9d6a5a2802d76ac171"} Jan 22 14:09:28 crc kubenswrapper[5017]: I0122 14:09:28.678155 5017 generic.go:334] "Generic (PLEG): container finished" podID="cfba557d-e1ff-4acf-b373-1ac74e4754ee" containerID="9ac4820866795b8cae3a220aa39687398bfcc1f8d4d2838e21318fe5a9603379" exitCode=0 Jan 22 14:09:28 crc kubenswrapper[5017]: I0122 14:09:28.678201 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6hplb" event={"ID":"cfba557d-e1ff-4acf-b373-1ac74e4754ee","Type":"ContainerDied","Data":"9ac4820866795b8cae3a220aa39687398bfcc1f8d4d2838e21318fe5a9603379"} Jan 22 14:09:28 crc kubenswrapper[5017]: I0122 14:09:28.773622 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ghrs9"] Jan 22 14:09:28 crc kubenswrapper[5017]: W0122 14:09:28.799858 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod24e6cfb8_c40c_4b03_873e_6d6ab15f1911.slice/crio-d21640ca33a39b083b60916eb8af7f54d050b076c150bb96ac4f341e3b255df5 WatchSource:0}: Error finding container d21640ca33a39b083b60916eb8af7f54d050b076c150bb96ac4f341e3b255df5: Status 404 returned error can't find the container with id d21640ca33a39b083b60916eb8af7f54d050b076c150bb96ac4f341e3b255df5 Jan 22 14:09:29 crc kubenswrapper[5017]: I0122 14:09:29.688589 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6hplb" event={"ID":"cfba557d-e1ff-4acf-b373-1ac74e4754ee","Type":"ContainerStarted","Data":"17c298058434a04c80a2550e9510469ceb8762b2e9df83bdfe87ce9c01e4a234"} Jan 22 14:09:29 crc 
kubenswrapper[5017]: I0122 14:09:29.692402 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wd96l" event={"ID":"0742b6c5-1b21-4e65-a1f2-1f50d1751e81","Type":"ContainerStarted","Data":"fe372893b87db18edabad60cc25ec905fbcca7bc1b25e8b857099d636f71066a"} Jan 22 14:09:29 crc kubenswrapper[5017]: I0122 14:09:29.693881 5017 generic.go:334] "Generic (PLEG): container finished" podID="24e6cfb8-c40c-4b03-873e-6d6ab15f1911" containerID="f43cb3c116ef77cec42318329de5df436b10b9c4b69744947fa43c2712b8a3a0" exitCode=0 Jan 22 14:09:29 crc kubenswrapper[5017]: I0122 14:09:29.693937 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ghrs9" event={"ID":"24e6cfb8-c40c-4b03-873e-6d6ab15f1911","Type":"ContainerDied","Data":"f43cb3c116ef77cec42318329de5df436b10b9c4b69744947fa43c2712b8a3a0"} Jan 22 14:09:29 crc kubenswrapper[5017]: I0122 14:09:29.693958 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ghrs9" event={"ID":"24e6cfb8-c40c-4b03-873e-6d6ab15f1911","Type":"ContainerStarted","Data":"d21640ca33a39b083b60916eb8af7f54d050b076c150bb96ac4f341e3b255df5"} Jan 22 14:09:29 crc kubenswrapper[5017]: I0122 14:09:29.696649 5017 generic.go:334] "Generic (PLEG): container finished" podID="9a572a90-9417-4bac-a648-227d3f892664" containerID="ebd0900c78f8cc84629939b2514731088cb70adcc7f55b9d6a5a2802d76ac171" exitCode=0 Jan 22 14:09:29 crc kubenswrapper[5017]: I0122 14:09:29.696692 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8jlwk" event={"ID":"9a572a90-9417-4bac-a648-227d3f892664","Type":"ContainerDied","Data":"ebd0900c78f8cc84629939b2514731088cb70adcc7f55b9d6a5a2802d76ac171"} Jan 22 14:09:29 crc kubenswrapper[5017]: I0122 14:09:29.707754 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6hplb" podStartSLOduration=3.102562402 podStartE2EDuration="4.707733559s" podCreationTimestamp="2026-01-22 14:09:25 +0000 UTC" firstStartedPulling="2026-01-22 14:09:27.624605837 +0000 UTC m=+382.617067705" lastFinishedPulling="2026-01-22 14:09:29.229777004 +0000 UTC m=+384.222238862" observedRunningTime="2026-01-22 14:09:29.706820532 +0000 UTC m=+384.699282420" watchObservedRunningTime="2026-01-22 14:09:29.707733559 +0000 UTC m=+384.700195407" Jan 22 14:09:30 crc kubenswrapper[5017]: I0122 14:09:30.704498 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8jlwk" event={"ID":"9a572a90-9417-4bac-a648-227d3f892664","Type":"ContainerStarted","Data":"5c8dadf68108b6f1b88606a7b36b146c436d87b46ecaeb563f087f3d499d7923"} Jan 22 14:09:30 crc kubenswrapper[5017]: I0122 14:09:30.708149 5017 generic.go:334] "Generic (PLEG): container finished" podID="0742b6c5-1b21-4e65-a1f2-1f50d1751e81" containerID="fe372893b87db18edabad60cc25ec905fbcca7bc1b25e8b857099d636f71066a" exitCode=0 Jan 22 14:09:30 crc kubenswrapper[5017]: I0122 14:09:30.708568 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wd96l" event={"ID":"0742b6c5-1b21-4e65-a1f2-1f50d1751e81","Type":"ContainerDied","Data":"fe372893b87db18edabad60cc25ec905fbcca7bc1b25e8b857099d636f71066a"} Jan 22 14:09:30 crc kubenswrapper[5017]: I0122 14:09:30.725776 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8jlwk" podStartSLOduration=2.240560065 
podStartE2EDuration="5.725758758s" podCreationTimestamp="2026-01-22 14:09:25 +0000 UTC" firstStartedPulling="2026-01-22 14:09:26.601669037 +0000 UTC m=+381.594130895" lastFinishedPulling="2026-01-22 14:09:30.08686773 +0000 UTC m=+385.079329588" observedRunningTime="2026-01-22 14:09:30.725520032 +0000 UTC m=+385.717981890" watchObservedRunningTime="2026-01-22 14:09:30.725758758 +0000 UTC m=+385.718220616" Jan 22 14:09:31 crc kubenswrapper[5017]: I0122 14:09:31.715615 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wd96l" event={"ID":"0742b6c5-1b21-4e65-a1f2-1f50d1751e81","Type":"ContainerStarted","Data":"4d9a7202ee0f3e87ba427f094ceb6f8ce35861dfdeaaa1a944313a13ed996237"} Jan 22 14:09:31 crc kubenswrapper[5017]: I0122 14:09:31.717938 5017 generic.go:334] "Generic (PLEG): container finished" podID="24e6cfb8-c40c-4b03-873e-6d6ab15f1911" containerID="59f088608c27f381fe2dc715168f2600515ef29a0315f40ed1fc3a2e10c40f25" exitCode=0 Jan 22 14:09:31 crc kubenswrapper[5017]: I0122 14:09:31.718027 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ghrs9" event={"ID":"24e6cfb8-c40c-4b03-873e-6d6ab15f1911","Type":"ContainerDied","Data":"59f088608c27f381fe2dc715168f2600515ef29a0315f40ed1fc3a2e10c40f25"} Jan 22 14:09:31 crc kubenswrapper[5017]: I0122 14:09:31.740418 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wd96l" podStartSLOduration=2.240309016 podStartE2EDuration="4.740402582s" podCreationTimestamp="2026-01-22 14:09:27 +0000 UTC" firstStartedPulling="2026-01-22 14:09:28.634749722 +0000 UTC m=+383.627211580" lastFinishedPulling="2026-01-22 14:09:31.134843288 +0000 UTC m=+386.127305146" observedRunningTime="2026-01-22 14:09:31.734830602 +0000 UTC m=+386.727292470" watchObservedRunningTime="2026-01-22 14:09:31.740402582 +0000 UTC m=+386.732864440" Jan 22 14:09:32 crc kubenswrapper[5017]: I0122 14:09:32.727906 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ghrs9" event={"ID":"24e6cfb8-c40c-4b03-873e-6d6ab15f1911","Type":"ContainerStarted","Data":"0355f9294cff7ff034b14f7a6f1e78c7b52147d68f6fe10747858afb3a412f13"} Jan 22 14:09:35 crc kubenswrapper[5017]: I0122 14:09:35.523923 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8jlwk" Jan 22 14:09:35 crc kubenswrapper[5017]: I0122 14:09:35.525183 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8jlwk" Jan 22 14:09:35 crc kubenswrapper[5017]: I0122 14:09:35.570723 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8jlwk" Jan 22 14:09:35 crc kubenswrapper[5017]: I0122 14:09:35.589692 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ghrs9" podStartSLOduration=5.196776902 podStartE2EDuration="7.589674405s" podCreationTimestamp="2026-01-22 14:09:28 +0000 UTC" firstStartedPulling="2026-01-22 14:09:29.695338643 +0000 UTC m=+384.687800501" lastFinishedPulling="2026-01-22 14:09:32.088236146 +0000 UTC m=+387.080698004" observedRunningTime="2026-01-22 14:09:32.752535724 +0000 UTC m=+387.744997592" watchObservedRunningTime="2026-01-22 14:09:35.589674405 +0000 UTC m=+390.582136263" Jan 22 14:09:35 crc kubenswrapper[5017]: I0122 14:09:35.780937 5017 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8jlwk" Jan 22 14:09:36 crc kubenswrapper[5017]: I0122 14:09:36.141261 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6hplb" Jan 22 14:09:36 crc kubenswrapper[5017]: I0122 14:09:36.141314 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6hplb" Jan 22 14:09:36 crc kubenswrapper[5017]: I0122 14:09:36.184132 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6hplb" Jan 22 14:09:36 crc kubenswrapper[5017]: I0122 14:09:36.790845 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6hplb" Jan 22 14:09:37 crc kubenswrapper[5017]: I0122 14:09:37.939684 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wd96l" Jan 22 14:09:37 crc kubenswrapper[5017]: I0122 14:09:37.939759 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wd96l" Jan 22 14:09:37 crc kubenswrapper[5017]: I0122 14:09:37.980732 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wd96l" Jan 22 14:09:38 crc kubenswrapper[5017]: I0122 14:09:38.544592 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ghrs9" Jan 22 14:09:38 crc kubenswrapper[5017]: I0122 14:09:38.544768 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ghrs9" Jan 22 14:09:38 crc kubenswrapper[5017]: I0122 14:09:38.581675 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ghrs9" Jan 22 14:09:38 crc kubenswrapper[5017]: I0122 14:09:38.800565 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ghrs9" Jan 22 14:09:38 crc kubenswrapper[5017]: I0122 14:09:38.801535 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wd96l" Jan 22 14:09:47 crc kubenswrapper[5017]: I0122 14:09:47.290434 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" podUID="74271c15-b41a-4067-996a-3d24215d6eb8" containerName="registry" containerID="cri-o://02048b02d3af33cfc22adc8c3449a08a0f8eb17393b1ebf40899c4a7b1856575" gracePeriod=30 Jan 22 14:09:47 crc kubenswrapper[5017]: I0122 14:09:47.801404 5017 generic.go:334] "Generic (PLEG): container finished" podID="74271c15-b41a-4067-996a-3d24215d6eb8" containerID="02048b02d3af33cfc22adc8c3449a08a0f8eb17393b1ebf40899c4a7b1856575" exitCode=0 Jan 22 14:09:47 crc kubenswrapper[5017]: I0122 14:09:47.801503 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" event={"ID":"74271c15-b41a-4067-996a-3d24215d6eb8","Type":"ContainerDied","Data":"02048b02d3af33cfc22adc8c3449a08a0f8eb17393b1ebf40899c4a7b1856575"} Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.138948 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.287517 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/74271c15-b41a-4067-996a-3d24215d6eb8-ca-trust-extracted\") pod \"74271c15-b41a-4067-996a-3d24215d6eb8\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.287572 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pm72m\" (UniqueName: \"kubernetes.io/projected/74271c15-b41a-4067-996a-3d24215d6eb8-kube-api-access-pm72m\") pod \"74271c15-b41a-4067-996a-3d24215d6eb8\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.287595 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/74271c15-b41a-4067-996a-3d24215d6eb8-trusted-ca\") pod \"74271c15-b41a-4067-996a-3d24215d6eb8\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.287845 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"74271c15-b41a-4067-996a-3d24215d6eb8\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.287894 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/74271c15-b41a-4067-996a-3d24215d6eb8-installation-pull-secrets\") pod \"74271c15-b41a-4067-996a-3d24215d6eb8\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.287921 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/74271c15-b41a-4067-996a-3d24215d6eb8-bound-sa-token\") pod \"74271c15-b41a-4067-996a-3d24215d6eb8\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.287980 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/74271c15-b41a-4067-996a-3d24215d6eb8-registry-certificates\") pod \"74271c15-b41a-4067-996a-3d24215d6eb8\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.288008 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/74271c15-b41a-4067-996a-3d24215d6eb8-registry-tls\") pod \"74271c15-b41a-4067-996a-3d24215d6eb8\" (UID: \"74271c15-b41a-4067-996a-3d24215d6eb8\") " Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.291999 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74271c15-b41a-4067-996a-3d24215d6eb8-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "74271c15-b41a-4067-996a-3d24215d6eb8" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.292082 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74271c15-b41a-4067-996a-3d24215d6eb8-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "74271c15-b41a-4067-996a-3d24215d6eb8" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.294881 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74271c15-b41a-4067-996a-3d24215d6eb8-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "74271c15-b41a-4067-996a-3d24215d6eb8" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.299322 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74271c15-b41a-4067-996a-3d24215d6eb8-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "74271c15-b41a-4067-996a-3d24215d6eb8" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.299453 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74271c15-b41a-4067-996a-3d24215d6eb8-kube-api-access-pm72m" (OuterVolumeSpecName: "kube-api-access-pm72m") pod "74271c15-b41a-4067-996a-3d24215d6eb8" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8"). InnerVolumeSpecName "kube-api-access-pm72m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.299690 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74271c15-b41a-4067-996a-3d24215d6eb8-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "74271c15-b41a-4067-996a-3d24215d6eb8" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.304660 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74271c15-b41a-4067-996a-3d24215d6eb8-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "74271c15-b41a-4067-996a-3d24215d6eb8" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.310675 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "74271c15-b41a-4067-996a-3d24215d6eb8" (UID: "74271c15-b41a-4067-996a-3d24215d6eb8"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.389677 5017 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/74271c15-b41a-4067-996a-3d24215d6eb8-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.389713 5017 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/74271c15-b41a-4067-996a-3d24215d6eb8-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.389722 5017 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/74271c15-b41a-4067-996a-3d24215d6eb8-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.389732 5017 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/74271c15-b41a-4067-996a-3d24215d6eb8-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.389742 5017 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/74271c15-b41a-4067-996a-3d24215d6eb8-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.389752 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pm72m\" (UniqueName: \"kubernetes.io/projected/74271c15-b41a-4067-996a-3d24215d6eb8-kube-api-access-pm72m\") on node \"crc\" DevicePath \"\"" Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.389759 5017 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/74271c15-b41a-4067-996a-3d24215d6eb8-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.808465 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" event={"ID":"74271c15-b41a-4067-996a-3d24215d6eb8","Type":"ContainerDied","Data":"669eab79893b8ede47456e6aece8be2fa48cd1aeba9c8c258ea8e1839b142c94"} Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.808518 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-tn54n" Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.808535 5017 scope.go:117] "RemoveContainer" containerID="02048b02d3af33cfc22adc8c3449a08a0f8eb17393b1ebf40899c4a7b1856575" Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.836227 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tn54n"] Jan 22 14:09:48 crc kubenswrapper[5017]: I0122 14:09:48.840642 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tn54n"] Jan 22 14:09:49 crc kubenswrapper[5017]: I0122 14:09:49.264775 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74271c15-b41a-4067-996a-3d24215d6eb8" path="/var/lib/kubelet/pods/74271c15-b41a-4067-996a-3d24215d6eb8/volumes" Jan 22 14:09:52 crc kubenswrapper[5017]: I0122 14:09:52.884024 5017 patch_prober.go:28] interesting pod/machine-config-daemon-cr62h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:09:52 crc kubenswrapper[5017]: I0122 14:09:52.884391 5017 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:10:22 crc kubenswrapper[5017]: I0122 14:10:22.883657 5017 patch_prober.go:28] interesting pod/machine-config-daemon-cr62h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:10:22 crc kubenswrapper[5017]: I0122 14:10:22.884507 5017 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:10:22 crc kubenswrapper[5017]: I0122 14:10:22.884571 5017 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" Jan 22 14:10:22 crc kubenswrapper[5017]: I0122 14:10:22.885223 5017 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"558dea29af1da1143db23fb238318c80a09e247580c9fbabc7f06ab287879a19"} pod="openshift-machine-config-operator/machine-config-daemon-cr62h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 14:10:22 crc kubenswrapper[5017]: I0122 14:10:22.885291 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" containerID="cri-o://558dea29af1da1143db23fb238318c80a09e247580c9fbabc7f06ab287879a19" gracePeriod=600 Jan 22 14:10:24 crc kubenswrapper[5017]: I0122 14:10:24.004899 5017 generic.go:334] "Generic (PLEG): container finished" 
podID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerID="558dea29af1da1143db23fb238318c80a09e247580c9fbabc7f06ab287879a19" exitCode=0 Jan 22 14:10:24 crc kubenswrapper[5017]: I0122 14:10:24.005007 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" event={"ID":"3f43f877-4d1f-4041-af1f-39a962f7ac0d","Type":"ContainerDied","Data":"558dea29af1da1143db23fb238318c80a09e247580c9fbabc7f06ab287879a19"} Jan 22 14:10:24 crc kubenswrapper[5017]: I0122 14:10:24.005454 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" event={"ID":"3f43f877-4d1f-4041-af1f-39a962f7ac0d","Type":"ContainerStarted","Data":"282d7fb498351da0e6f85f32143410226094674654291915dfeb3a542bab8422"} Jan 22 14:10:24 crc kubenswrapper[5017]: I0122 14:10:24.005501 5017 scope.go:117] "RemoveContainer" containerID="dc9d5e963ba36e0fc4a9469dcf7f20d3b529c813b26cf3443c963011ce7507f5" Jan 22 14:12:05 crc kubenswrapper[5017]: I0122 14:12:05.438672 5017 scope.go:117] "RemoveContainer" containerID="d4fe4ac8eb6d48f705da75faab75aba7b850cb548a5005581e2c92192048eaf0" Jan 22 14:12:05 crc kubenswrapper[5017]: I0122 14:12:05.467917 5017 scope.go:117] "RemoveContainer" containerID="655ad5d27f6930521fa0a5421202b7b12e08666570f545cec10387040c9476c1" Jan 22 14:12:52 crc kubenswrapper[5017]: I0122 14:12:52.884747 5017 patch_prober.go:28] interesting pod/machine-config-daemon-cr62h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:12:52 crc kubenswrapper[5017]: I0122 14:12:52.886003 5017 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:13:22 crc kubenswrapper[5017]: I0122 14:13:22.883989 5017 patch_prober.go:28] interesting pod/machine-config-daemon-cr62h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:13:22 crc kubenswrapper[5017]: I0122 14:13:22.885183 5017 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:13:52 crc kubenswrapper[5017]: I0122 14:13:52.884572 5017 patch_prober.go:28] interesting pod/machine-config-daemon-cr62h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:13:52 crc kubenswrapper[5017]: I0122 14:13:52.885570 5017 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" Jan 22 14:13:52 crc kubenswrapper[5017]: I0122 14:13:52.885641 5017 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" Jan 22 14:13:52 crc kubenswrapper[5017]: I0122 14:13:52.886969 5017 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"282d7fb498351da0e6f85f32143410226094674654291915dfeb3a542bab8422"} pod="openshift-machine-config-operator/machine-config-daemon-cr62h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 14:13:52 crc kubenswrapper[5017]: I0122 14:13:52.887030 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" containerID="cri-o://282d7fb498351da0e6f85f32143410226094674654291915dfeb3a542bab8422" gracePeriod=600 Jan 22 14:13:53 crc kubenswrapper[5017]: I0122 14:13:53.415825 5017 generic.go:334] "Generic (PLEG): container finished" podID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerID="282d7fb498351da0e6f85f32143410226094674654291915dfeb3a542bab8422" exitCode=0 Jan 22 14:13:53 crc kubenswrapper[5017]: I0122 14:13:53.415923 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" event={"ID":"3f43f877-4d1f-4041-af1f-39a962f7ac0d","Type":"ContainerDied","Data":"282d7fb498351da0e6f85f32143410226094674654291915dfeb3a542bab8422"} Jan 22 14:13:53 crc kubenswrapper[5017]: I0122 14:13:53.415978 5017 scope.go:117] "RemoveContainer" containerID="558dea29af1da1143db23fb238318c80a09e247580c9fbabc7f06ab287879a19" Jan 22 14:13:54 crc kubenswrapper[5017]: I0122 14:13:54.422675 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" event={"ID":"3f43f877-4d1f-4041-af1f-39a962f7ac0d","Type":"ContainerStarted","Data":"d5ba9ef9161473c8ce388bf5a9a68f3a95a5b876f5893ae82a825172bb9859d5"} Jan 22 14:14:47 crc kubenswrapper[5017]: I0122 14:14:47.734220 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-thscs"] Jan 22 14:14:47 crc kubenswrapper[5017]: E0122 14:14:47.735428 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74271c15-b41a-4067-996a-3d24215d6eb8" containerName="registry" Jan 22 14:14:47 crc kubenswrapper[5017]: I0122 14:14:47.735450 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="74271c15-b41a-4067-996a-3d24215d6eb8" containerName="registry" Jan 22 14:14:47 crc kubenswrapper[5017]: I0122 14:14:47.735593 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="74271c15-b41a-4067-996a-3d24215d6eb8" containerName="registry" Jan 22 14:14:47 crc kubenswrapper[5017]: I0122 14:14:47.736339 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-thscs" Jan 22 14:14:47 crc kubenswrapper[5017]: I0122 14:14:47.739362 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Jan 22 14:14:47 crc kubenswrapper[5017]: I0122 14:14:47.739882 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Jan 22 14:14:47 crc kubenswrapper[5017]: I0122 14:14:47.741600 5017 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-9xgs9" Jan 22 14:14:47 crc kubenswrapper[5017]: I0122 14:14:47.753530 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-858654f9db-2bsnh"] Jan 22 14:14:47 crc kubenswrapper[5017]: I0122 14:14:47.754550 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-2bsnh" Jan 22 14:14:47 crc kubenswrapper[5017]: I0122 14:14:47.758511 5017 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-6ph8r" Jan 22 14:14:47 crc kubenswrapper[5017]: I0122 14:14:47.762250 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-thscs"] Jan 22 14:14:47 crc kubenswrapper[5017]: I0122 14:14:47.784400 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-8nlnj"] Jan 22 14:14:47 crc kubenswrapper[5017]: I0122 14:14:47.785509 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-8nlnj" Jan 22 14:14:47 crc kubenswrapper[5017]: I0122 14:14:47.794867 5017 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-srbtb" Jan 22 14:14:47 crc kubenswrapper[5017]: I0122 14:14:47.796879 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-2bsnh"] Jan 22 14:14:47 crc kubenswrapper[5017]: I0122 14:14:47.802374 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-8nlnj"] Jan 22 14:14:47 crc kubenswrapper[5017]: I0122 14:14:47.913594 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsknh\" (UniqueName: \"kubernetes.io/projected/742ae349-26f7-4244-bcc9-3c7c2373a0b4-kube-api-access-vsknh\") pod \"cert-manager-webhook-687f57d79b-8nlnj\" (UID: \"742ae349-26f7-4244-bcc9-3c7c2373a0b4\") " pod="cert-manager/cert-manager-webhook-687f57d79b-8nlnj" Jan 22 14:14:47 crc kubenswrapper[5017]: I0122 14:14:47.913799 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6sz59\" (UniqueName: \"kubernetes.io/projected/c134ddb1-8fd5-4e73-bb84-f48e269b59ab-kube-api-access-6sz59\") pod \"cert-manager-cainjector-cf98fcc89-thscs\" (UID: \"c134ddb1-8fd5-4e73-bb84-f48e269b59ab\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-thscs" Jan 22 14:14:47 crc kubenswrapper[5017]: I0122 14:14:47.913964 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfk4r\" (UniqueName: \"kubernetes.io/projected/ae5b9e89-dc5d-4b4d-aac7-c025f72ca407-kube-api-access-zfk4r\") pod \"cert-manager-858654f9db-2bsnh\" (UID: \"ae5b9e89-dc5d-4b4d-aac7-c025f72ca407\") " pod="cert-manager/cert-manager-858654f9db-2bsnh" Jan 22 14:14:48 crc 
kubenswrapper[5017]: I0122 14:14:48.015608 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6sz59\" (UniqueName: \"kubernetes.io/projected/c134ddb1-8fd5-4e73-bb84-f48e269b59ab-kube-api-access-6sz59\") pod \"cert-manager-cainjector-cf98fcc89-thscs\" (UID: \"c134ddb1-8fd5-4e73-bb84-f48e269b59ab\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-thscs" Jan 22 14:14:48 crc kubenswrapper[5017]: I0122 14:14:48.015698 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfk4r\" (UniqueName: \"kubernetes.io/projected/ae5b9e89-dc5d-4b4d-aac7-c025f72ca407-kube-api-access-zfk4r\") pod \"cert-manager-858654f9db-2bsnh\" (UID: \"ae5b9e89-dc5d-4b4d-aac7-c025f72ca407\") " pod="cert-manager/cert-manager-858654f9db-2bsnh" Jan 22 14:14:48 crc kubenswrapper[5017]: I0122 14:14:48.015775 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsknh\" (UniqueName: \"kubernetes.io/projected/742ae349-26f7-4244-bcc9-3c7c2373a0b4-kube-api-access-vsknh\") pod \"cert-manager-webhook-687f57d79b-8nlnj\" (UID: \"742ae349-26f7-4244-bcc9-3c7c2373a0b4\") " pod="cert-manager/cert-manager-webhook-687f57d79b-8nlnj" Jan 22 14:14:48 crc kubenswrapper[5017]: I0122 14:14:48.041611 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6sz59\" (UniqueName: \"kubernetes.io/projected/c134ddb1-8fd5-4e73-bb84-f48e269b59ab-kube-api-access-6sz59\") pod \"cert-manager-cainjector-cf98fcc89-thscs\" (UID: \"c134ddb1-8fd5-4e73-bb84-f48e269b59ab\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-thscs" Jan 22 14:14:48 crc kubenswrapper[5017]: I0122 14:14:48.042462 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfk4r\" (UniqueName: \"kubernetes.io/projected/ae5b9e89-dc5d-4b4d-aac7-c025f72ca407-kube-api-access-zfk4r\") pod \"cert-manager-858654f9db-2bsnh\" (UID: \"ae5b9e89-dc5d-4b4d-aac7-c025f72ca407\") " pod="cert-manager/cert-manager-858654f9db-2bsnh" Jan 22 14:14:48 crc kubenswrapper[5017]: I0122 14:14:48.048408 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsknh\" (UniqueName: \"kubernetes.io/projected/742ae349-26f7-4244-bcc9-3c7c2373a0b4-kube-api-access-vsknh\") pod \"cert-manager-webhook-687f57d79b-8nlnj\" (UID: \"742ae349-26f7-4244-bcc9-3c7c2373a0b4\") " pod="cert-manager/cert-manager-webhook-687f57d79b-8nlnj" Jan 22 14:14:48 crc kubenswrapper[5017]: I0122 14:14:48.058221 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-thscs" Jan 22 14:14:48 crc kubenswrapper[5017]: I0122 14:14:48.086269 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-2bsnh" Jan 22 14:14:48 crc kubenswrapper[5017]: I0122 14:14:48.097191 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-8nlnj" Jan 22 14:14:48 crc kubenswrapper[5017]: I0122 14:14:48.318622 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-thscs"] Jan 22 14:14:48 crc kubenswrapper[5017]: I0122 14:14:48.330150 5017 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 14:14:48 crc kubenswrapper[5017]: I0122 14:14:48.374032 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-2bsnh"] Jan 22 14:14:48 crc kubenswrapper[5017]: I0122 14:14:48.434516 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-8nlnj"] Jan 22 14:14:48 crc kubenswrapper[5017]: I0122 14:14:48.805709 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-thscs" event={"ID":"c134ddb1-8fd5-4e73-bb84-f48e269b59ab","Type":"ContainerStarted","Data":"ea17bc0878e08f6cf14f067fd3990d8532616ec12775b547a4265477ef9919db"} Jan 22 14:14:48 crc kubenswrapper[5017]: I0122 14:14:48.807134 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-2bsnh" event={"ID":"ae5b9e89-dc5d-4b4d-aac7-c025f72ca407","Type":"ContainerStarted","Data":"80f7f596ff4d858d8ef696dd7324267d93abcd40f3bc67ec512ce256688beb09"} Jan 22 14:14:48 crc kubenswrapper[5017]: I0122 14:14:48.808424 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-8nlnj" event={"ID":"742ae349-26f7-4244-bcc9-3c7c2373a0b4","Type":"ContainerStarted","Data":"ca512a6dd4293bf59757137e6f38a281c7a8a04dcb20995d5fc19cf529746012"} Jan 22 14:14:52 crc kubenswrapper[5017]: I0122 14:14:52.838817 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-2bsnh" event={"ID":"ae5b9e89-dc5d-4b4d-aac7-c025f72ca407","Type":"ContainerStarted","Data":"431ceddaa1f1fb2dbb48dbc9e73006f0e0e3b5213ab1aa965690d661149c3ed5"} Jan 22 14:14:52 crc kubenswrapper[5017]: I0122 14:14:52.840389 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-8nlnj" event={"ID":"742ae349-26f7-4244-bcc9-3c7c2373a0b4","Type":"ContainerStarted","Data":"41f9ae0083586f396e0954eb64b4f014af3518f7c2eda91f3fd97d9ccbee4477"} Jan 22 14:14:52 crc kubenswrapper[5017]: I0122 14:14:52.840507 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-687f57d79b-8nlnj" Jan 22 14:14:52 crc kubenswrapper[5017]: I0122 14:14:52.841818 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-thscs" event={"ID":"c134ddb1-8fd5-4e73-bb84-f48e269b59ab","Type":"ContainerStarted","Data":"5d1404d22a2bcd2aeb126a2a2ccab5a4e516baf449be4ecfae35d8a873436903"} Jan 22 14:14:52 crc kubenswrapper[5017]: I0122 14:14:52.878602 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-858654f9db-2bsnh" podStartSLOduration=1.926841591 podStartE2EDuration="5.878579409s" podCreationTimestamp="2026-01-22 14:14:47 +0000 UTC" firstStartedPulling="2026-01-22 14:14:48.403394661 +0000 UTC m=+703.395856519" lastFinishedPulling="2026-01-22 14:14:52.355132479 +0000 UTC m=+707.347594337" observedRunningTime="2026-01-22 14:14:52.877589401 +0000 UTC m=+707.870051259" watchObservedRunningTime="2026-01-22 14:14:52.878579409 +0000 UTC m=+707.871041267" Jan 22 14:14:52 crc 
kubenswrapper[5017]: I0122 14:14:52.927826 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-687f57d79b-8nlnj" podStartSLOduration=2.057228473 podStartE2EDuration="5.927798765s" podCreationTimestamp="2026-01-22 14:14:47 +0000 UTC" firstStartedPulling="2026-01-22 14:14:48.435641929 +0000 UTC m=+703.428103787" lastFinishedPulling="2026-01-22 14:14:52.306212181 +0000 UTC m=+707.298674079" observedRunningTime="2026-01-22 14:14:52.912305729 +0000 UTC m=+707.904767587" watchObservedRunningTime="2026-01-22 14:14:52.927798765 +0000 UTC m=+707.920260623" Jan 22 14:14:52 crc kubenswrapper[5017]: I0122 14:14:52.930064 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-cf98fcc89-thscs" podStartSLOduration=1.945699343 podStartE2EDuration="5.930050389s" podCreationTimestamp="2026-01-22 14:14:47 +0000 UTC" firstStartedPulling="2026-01-22 14:14:48.329756857 +0000 UTC m=+703.322218715" lastFinishedPulling="2026-01-22 14:14:52.314107863 +0000 UTC m=+707.306569761" observedRunningTime="2026-01-22 14:14:52.926437217 +0000 UTC m=+707.918899075" watchObservedRunningTime="2026-01-22 14:14:52.930050389 +0000 UTC m=+707.922512247" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.628236 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-vgtf7"] Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.629245 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovn-controller" containerID="cri-o://af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78" gracePeriod=30 Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.629334 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="nbdb" containerID="cri-o://bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60" gracePeriod=30 Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.629428 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="sbdb" containerID="cri-o://a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482" gracePeriod=30 Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.629451 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75" gracePeriod=30 Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.629472 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="northd" containerID="cri-o://1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c" gracePeriod=30 Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.629497 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="kube-rbac-proxy-node" 
containerID="cri-o://ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d" gracePeriod=30 Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.629519 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovn-acl-logging" containerID="cri-o://fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9" gracePeriod=30 Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.690612 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovnkube-controller" containerID="cri-o://1d1097c8995866c82cfe80dc8db5fa80717f4591526ecd539d41108303fdab60" gracePeriod=30 Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.881775 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j2qmt_c223e7fe-8360-4731-a32d-3cf60ed7324b/kube-multus/2.log" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.882541 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j2qmt_c223e7fe-8360-4731-a32d-3cf60ed7324b/kube-multus/1.log" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.882609 5017 generic.go:334] "Generic (PLEG): container finished" podID="c223e7fe-8360-4731-a32d-3cf60ed7324b" containerID="d04e65bff2218b2db1e8def603023e29a162b0a2a77092623bd04d9e54430afd" exitCode=2 Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.882697 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j2qmt" event={"ID":"c223e7fe-8360-4731-a32d-3cf60ed7324b","Type":"ContainerDied","Data":"d04e65bff2218b2db1e8def603023e29a162b0a2a77092623bd04d9e54430afd"} Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.882780 5017 scope.go:117] "RemoveContainer" containerID="5f97833420fc482dc5fa1d748caa12701d129b5081056d0a7f8c40050c64cf2f" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.884018 5017 scope.go:117] "RemoveContainer" containerID="d04e65bff2218b2db1e8def603023e29a162b0a2a77092623bd04d9e54430afd" Jan 22 14:14:57 crc kubenswrapper[5017]: E0122 14:14:57.884492 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-j2qmt_openshift-multus(c223e7fe-8360-4731-a32d-3cf60ed7324b)\"" pod="openshift-multus/multus-j2qmt" podUID="c223e7fe-8360-4731-a32d-3cf60ed7324b" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.894082 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vgtf7_43c64840-5206-4dec-834e-77e0562f19de/ovnkube-controller/3.log" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.913567 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vgtf7_43c64840-5206-4dec-834e-77e0562f19de/ovn-acl-logging/0.log" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.915387 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vgtf7_43c64840-5206-4dec-834e-77e0562f19de/ovn-controller/0.log" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.918062 5017 generic.go:334] "Generic (PLEG): container finished" podID="43c64840-5206-4dec-834e-77e0562f19de" containerID="1d1097c8995866c82cfe80dc8db5fa80717f4591526ecd539d41108303fdab60" exitCode=0 Jan 22 
14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.918148 5017 generic.go:334] "Generic (PLEG): container finished" podID="43c64840-5206-4dec-834e-77e0562f19de" containerID="a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482" exitCode=0 Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.918161 5017 generic.go:334] "Generic (PLEG): container finished" podID="43c64840-5206-4dec-834e-77e0562f19de" containerID="bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60" exitCode=0 Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.918173 5017 generic.go:334] "Generic (PLEG): container finished" podID="43c64840-5206-4dec-834e-77e0562f19de" containerID="1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c" exitCode=0 Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.918182 5017 generic.go:334] "Generic (PLEG): container finished" podID="43c64840-5206-4dec-834e-77e0562f19de" containerID="2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75" exitCode=0 Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.918192 5017 generic.go:334] "Generic (PLEG): container finished" podID="43c64840-5206-4dec-834e-77e0562f19de" containerID="ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d" exitCode=0 Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.918202 5017 generic.go:334] "Generic (PLEG): container finished" podID="43c64840-5206-4dec-834e-77e0562f19de" containerID="fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9" exitCode=143 Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.918213 5017 generic.go:334] "Generic (PLEG): container finished" podID="43c64840-5206-4dec-834e-77e0562f19de" containerID="af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78" exitCode=143 Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.918250 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerDied","Data":"1d1097c8995866c82cfe80dc8db5fa80717f4591526ecd539d41108303fdab60"} Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.918294 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerDied","Data":"a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482"} Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.918309 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerDied","Data":"bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60"} Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.918322 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerDied","Data":"1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c"} Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.918336 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerDied","Data":"2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75"} Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.918467 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" 
event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerDied","Data":"ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d"} Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.918484 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerDied","Data":"fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9"} Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.918499 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerDied","Data":"af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78"} Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.930312 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vgtf7_43c64840-5206-4dec-834e-77e0562f19de/ovnkube-controller/3.log" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.935210 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vgtf7_43c64840-5206-4dec-834e-77e0562f19de/ovn-acl-logging/0.log" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.935916 5017 scope.go:117] "RemoveContainer" containerID="20495ad9747c4b88b78329f029365528560cc625df54d8291d6873b4afec6222" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.936074 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vgtf7_43c64840-5206-4dec-834e-77e0562f19de/ovn-controller/0.log" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.936596 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.989543 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-kubelet\") pod \"43c64840-5206-4dec-834e-77e0562f19de\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.989628 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-node-log\") pod \"43c64840-5206-4dec-834e-77e0562f19de\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.989655 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-etc-openvswitch\") pod \"43c64840-5206-4dec-834e-77e0562f19de\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.989658 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "43c64840-5206-4dec-834e-77e0562f19de" (UID: "43c64840-5206-4dec-834e-77e0562f19de"). InnerVolumeSpecName "host-kubelet". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.989704 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-run-netns\") pod \"43c64840-5206-4dec-834e-77e0562f19de\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.989754 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/43c64840-5206-4dec-834e-77e0562f19de-env-overrides\") pod \"43c64840-5206-4dec-834e-77e0562f19de\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.989754 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "43c64840-5206-4dec-834e-77e0562f19de" (UID: "43c64840-5206-4dec-834e-77e0562f19de"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.989753 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-node-log" (OuterVolumeSpecName: "node-log") pod "43c64840-5206-4dec-834e-77e0562f19de" (UID: "43c64840-5206-4dec-834e-77e0562f19de"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.989791 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "43c64840-5206-4dec-834e-77e0562f19de" (UID: "43c64840-5206-4dec-834e-77e0562f19de"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.989775 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-var-lib-openvswitch\") pod \"43c64840-5206-4dec-834e-77e0562f19de\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.989894 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "43c64840-5206-4dec-834e-77e0562f19de" (UID: "43c64840-5206-4dec-834e-77e0562f19de"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.989928 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "43c64840-5206-4dec-834e-77e0562f19de" (UID: "43c64840-5206-4dec-834e-77e0562f19de"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.989886 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-cni-bin\") pod \"43c64840-5206-4dec-834e-77e0562f19de\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.989974 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-cni-netd\") pod \"43c64840-5206-4dec-834e-77e0562f19de\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.990021 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-run-systemd\") pod \"43c64840-5206-4dec-834e-77e0562f19de\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.990047 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "43c64840-5206-4dec-834e-77e0562f19de" (UID: "43c64840-5206-4dec-834e-77e0562f19de"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.990055 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-slash\") pod \"43c64840-5206-4dec-834e-77e0562f19de\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.990134 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-slash" (OuterVolumeSpecName: "host-slash") pod "43c64840-5206-4dec-834e-77e0562f19de" (UID: "43c64840-5206-4dec-834e-77e0562f19de"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.990132 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-var-lib-cni-networks-ovn-kubernetes\") pod \"43c64840-5206-4dec-834e-77e0562f19de\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.990162 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "43c64840-5206-4dec-834e-77e0562f19de" (UID: "43c64840-5206-4dec-834e-77e0562f19de"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.990408 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43c64840-5206-4dec-834e-77e0562f19de-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "43c64840-5206-4dec-834e-77e0562f19de" (UID: "43c64840-5206-4dec-834e-77e0562f19de"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.990968 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/43c64840-5206-4dec-834e-77e0562f19de-ovn-node-metrics-cert\") pod \"43c64840-5206-4dec-834e-77e0562f19de\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.991007 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lx5q\" (UniqueName: \"kubernetes.io/projected/43c64840-5206-4dec-834e-77e0562f19de-kube-api-access-6lx5q\") pod \"43c64840-5206-4dec-834e-77e0562f19de\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.991054 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-run-ovn\") pod \"43c64840-5206-4dec-834e-77e0562f19de\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.991080 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-run-ovn-kubernetes\") pod \"43c64840-5206-4dec-834e-77e0562f19de\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.991099 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/43c64840-5206-4dec-834e-77e0562f19de-ovnkube-config\") pod \"43c64840-5206-4dec-834e-77e0562f19de\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.991137 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/43c64840-5206-4dec-834e-77e0562f19de-ovnkube-script-lib\") pod \"43c64840-5206-4dec-834e-77e0562f19de\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.991131 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "43c64840-5206-4dec-834e-77e0562f19de" (UID: "43c64840-5206-4dec-834e-77e0562f19de"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.991191 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-log-socket\") pod \"43c64840-5206-4dec-834e-77e0562f19de\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.991179 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "43c64840-5206-4dec-834e-77e0562f19de" (UID: "43c64840-5206-4dec-834e-77e0562f19de"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.991210 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-run-openvswitch\") pod \"43c64840-5206-4dec-834e-77e0562f19de\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.991235 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "43c64840-5206-4dec-834e-77e0562f19de" (UID: "43c64840-5206-4dec-834e-77e0562f19de"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.991332 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-systemd-units\") pod \"43c64840-5206-4dec-834e-77e0562f19de\" (UID: \"43c64840-5206-4dec-834e-77e0562f19de\") " Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.991676 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43c64840-5206-4dec-834e-77e0562f19de-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "43c64840-5206-4dec-834e-77e0562f19de" (UID: "43c64840-5206-4dec-834e-77e0562f19de"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.991682 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-log-socket" (OuterVolumeSpecName: "log-socket") pod "43c64840-5206-4dec-834e-77e0562f19de" (UID: "43c64840-5206-4dec-834e-77e0562f19de"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.991725 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "43c64840-5206-4dec-834e-77e0562f19de" (UID: "43c64840-5206-4dec-834e-77e0562f19de"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.991747 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43c64840-5206-4dec-834e-77e0562f19de-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "43c64840-5206-4dec-834e-77e0562f19de" (UID: "43c64840-5206-4dec-834e-77e0562f19de"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.992297 5017 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-kubelet\") on node \"crc\" DevicePath \"\"" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.992321 5017 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-node-log\") on node \"crc\" DevicePath \"\"" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.992335 5017 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.992349 5017 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-run-netns\") on node \"crc\" DevicePath \"\"" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.992363 5017 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/43c64840-5206-4dec-834e-77e0562f19de-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.992378 5017 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.992392 5017 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-cni-bin\") on node \"crc\" DevicePath \"\"" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.992406 5017 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-cni-netd\") on node \"crc\" DevicePath \"\"" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.992420 5017 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-slash\") on node \"crc\" DevicePath \"\"" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.992436 5017 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.992453 5017 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.992468 5017 
reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.992482 5017 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/43c64840-5206-4dec-834e-77e0562f19de-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.992495 5017 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/43c64840-5206-4dec-834e-77e0562f19de-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.992508 5017 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-log-socket\") on node \"crc\" DevicePath \"\"" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.992522 5017 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-run-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.992535 5017 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-systemd-units\") on node \"crc\" DevicePath \"\"" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.994316 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-565xb"] Jan 22 14:14:57 crc kubenswrapper[5017]: E0122 14:14:57.994636 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="kube-rbac-proxy-ovn-metrics" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.994656 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="kube-rbac-proxy-ovn-metrics" Jan 22 14:14:57 crc kubenswrapper[5017]: E0122 14:14:57.994667 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="sbdb" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.994675 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="sbdb" Jan 22 14:14:57 crc kubenswrapper[5017]: E0122 14:14:57.994685 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovnkube-controller" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.994693 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovnkube-controller" Jan 22 14:14:57 crc kubenswrapper[5017]: E0122 14:14:57.994702 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="kube-rbac-proxy-node" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.994708 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="kube-rbac-proxy-node" Jan 22 14:14:57 crc kubenswrapper[5017]: E0122 14:14:57.994718 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovn-acl-logging" Jan 22 14:14:57 crc 
kubenswrapper[5017]: I0122 14:14:57.994726 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovn-acl-logging" Jan 22 14:14:57 crc kubenswrapper[5017]: E0122 14:14:57.994740 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="nbdb" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.994748 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="nbdb" Jan 22 14:14:57 crc kubenswrapper[5017]: E0122 14:14:57.994760 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="kubecfg-setup" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.994770 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="kubecfg-setup" Jan 22 14:14:57 crc kubenswrapper[5017]: E0122 14:14:57.994782 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovnkube-controller" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.994789 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovnkube-controller" Jan 22 14:14:57 crc kubenswrapper[5017]: E0122 14:14:57.994799 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovnkube-controller" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.994805 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovnkube-controller" Jan 22 14:14:57 crc kubenswrapper[5017]: E0122 14:14:57.994813 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovn-controller" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.994820 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovn-controller" Jan 22 14:14:57 crc kubenswrapper[5017]: E0122 14:14:57.994834 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="northd" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.994842 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="northd" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.994958 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="kube-rbac-proxy-ovn-metrics" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.994972 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovnkube-controller" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.994981 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovnkube-controller" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.994988 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="kube-rbac-proxy-node" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.994999 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="sbdb" Jan 22 
14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.995006 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovn-controller" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.995013 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovn-acl-logging" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.995022 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="nbdb" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.995032 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovnkube-controller" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.995040 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="northd" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.995048 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovnkube-controller" Jan 22 14:14:57 crc kubenswrapper[5017]: E0122 14:14:57.995186 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovnkube-controller" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.995196 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovnkube-controller" Jan 22 14:14:57 crc kubenswrapper[5017]: E0122 14:14:57.995208 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovnkube-controller" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.995215 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovnkube-controller" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.995324 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="43c64840-5206-4dec-834e-77e0562f19de" containerName="ovnkube-controller" Jan 22 14:14:57 crc kubenswrapper[5017]: I0122 14:14:57.997380 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.002523 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43c64840-5206-4dec-834e-77e0562f19de-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "43c64840-5206-4dec-834e-77e0562f19de" (UID: "43c64840-5206-4dec-834e-77e0562f19de"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.003818 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43c64840-5206-4dec-834e-77e0562f19de-kube-api-access-6lx5q" (OuterVolumeSpecName: "kube-api-access-6lx5q") pod "43c64840-5206-4dec-834e-77e0562f19de" (UID: "43c64840-5206-4dec-834e-77e0562f19de"). InnerVolumeSpecName "kube-api-access-6lx5q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.008791 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "43c64840-5206-4dec-834e-77e0562f19de" (UID: "43c64840-5206-4dec-834e-77e0562f19de"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.094581 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-etc-openvswitch\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.094665 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-systemd-units\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.094697 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bfdc4edd-e91a-491a-944d-b2735bd61a2f-ovnkube-config\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.094861 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-node-log\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.094933 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-host-run-netns\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.094977 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bfdc4edd-e91a-491a-944d-b2735bd61a2f-ovn-node-metrics-cert\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.095011 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/bfdc4edd-e91a-491a-944d-b2735bd61a2f-ovnkube-script-lib\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.095190 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-var-lib-openvswitch\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.095254 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-run-ovn\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.095292 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vm89\" (UniqueName: \"kubernetes.io/projected/bfdc4edd-e91a-491a-944d-b2735bd61a2f-kube-api-access-5vm89\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.095323 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-run-systemd\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.095344 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-host-cni-bin\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.095373 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-host-kubelet\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.095393 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bfdc4edd-e91a-491a-944d-b2735bd61a2f-env-overrides\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.095433 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-log-socket\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.095453 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-host-slash\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.095502 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-host-cni-netd\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.095547 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-run-openvswitch\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.095624 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-host-run-ovn-kubernetes\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.095655 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.095784 5017 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/43c64840-5206-4dec-834e-77e0562f19de-run-systemd\") on node \"crc\" DevicePath \"\"" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.095843 5017 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/43c64840-5206-4dec-834e-77e0562f19de-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.095867 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lx5q\" (UniqueName: \"kubernetes.io/projected/43c64840-5206-4dec-834e-77e0562f19de-kube-api-access-6lx5q\") on node \"crc\" DevicePath \"\"" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.101370 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-687f57d79b-8nlnj" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.197405 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-host-kubelet\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.197479 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bfdc4edd-e91a-491a-944d-b2735bd61a2f-env-overrides\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.197532 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: 
\"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-log-socket\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.197557 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-host-slash\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.197587 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-host-cni-netd\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.197613 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-log-socket\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.197638 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-host-slash\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.197548 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-host-kubelet\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.197684 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-run-openvswitch\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.197692 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-host-cni-netd\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.197617 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-run-openvswitch\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.197880 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-host-run-ovn-kubernetes\") pod \"ovnkube-node-565xb\" (UID: 
\"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.197926 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.197999 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-host-run-ovn-kubernetes\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.198028 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-etc-openvswitch\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.198060 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.198072 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-systemd-units\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.198098 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bfdc4edd-e91a-491a-944d-b2735bd61a2f-ovnkube-config\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.198101 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-etc-openvswitch\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.198166 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-systemd-units\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.198183 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-node-log\") pod \"ovnkube-node-565xb\" (UID: 
\"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.198218 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-host-run-netns\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.198257 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bfdc4edd-e91a-491a-944d-b2735bd61a2f-ovn-node-metrics-cert\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.198286 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/bfdc4edd-e91a-491a-944d-b2735bd61a2f-ovnkube-script-lib\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.198343 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-var-lib-openvswitch\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.198375 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-run-ovn\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.198401 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vm89\" (UniqueName: \"kubernetes.io/projected/bfdc4edd-e91a-491a-944d-b2735bd61a2f-kube-api-access-5vm89\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.198420 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-run-systemd\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.198448 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-host-cni-bin\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.198286 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-node-log\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 
22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.198492 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-var-lib-openvswitch\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.198638 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-run-ovn\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.198680 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-host-cni-bin\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.198760 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-run-systemd\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.199241 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bfdc4edd-e91a-491a-944d-b2735bd61a2f-host-run-netns\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.199250 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bfdc4edd-e91a-491a-944d-b2735bd61a2f-ovnkube-config\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.199427 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bfdc4edd-e91a-491a-944d-b2735bd61a2f-env-overrides\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.199654 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/bfdc4edd-e91a-491a-944d-b2735bd61a2f-ovnkube-script-lib\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.204836 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bfdc4edd-e91a-491a-944d-b2735bd61a2f-ovn-node-metrics-cert\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.217409 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vm89\" 
(UniqueName: \"kubernetes.io/projected/bfdc4edd-e91a-491a-944d-b2735bd61a2f-kube-api-access-5vm89\") pod \"ovnkube-node-565xb\" (UID: \"bfdc4edd-e91a-491a-944d-b2735bd61a2f\") " pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:58 crc kubenswrapper[5017]: I0122 14:14:58.314781 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:14:59 crc kubenswrapper[5017]: I0122 14:14:58.926930 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vgtf7_43c64840-5206-4dec-834e-77e0562f19de/ovn-acl-logging/0.log" Jan 22 14:14:59 crc kubenswrapper[5017]: I0122 14:14:58.927706 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vgtf7_43c64840-5206-4dec-834e-77e0562f19de/ovn-controller/0.log" Jan 22 14:14:59 crc kubenswrapper[5017]: I0122 14:14:58.928253 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" event={"ID":"43c64840-5206-4dec-834e-77e0562f19de","Type":"ContainerDied","Data":"52a774bd5b6e86138690c2dfd67d451bcb9f98fad127a3125411c537377ceb67"} Jan 22 14:14:59 crc kubenswrapper[5017]: I0122 14:14:58.928316 5017 scope.go:117] "RemoveContainer" containerID="1d1097c8995866c82cfe80dc8db5fa80717f4591526ecd539d41108303fdab60" Jan 22 14:14:59 crc kubenswrapper[5017]: I0122 14:14:58.928354 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-vgtf7" Jan 22 14:14:59 crc kubenswrapper[5017]: I0122 14:14:58.935901 5017 generic.go:334] "Generic (PLEG): container finished" podID="bfdc4edd-e91a-491a-944d-b2735bd61a2f" containerID="0cf846cee88869c4c3103abc46d554e2e9615eed8327edb5717bdeaa10ddf5de" exitCode=0 Jan 22 14:14:59 crc kubenswrapper[5017]: I0122 14:14:58.935956 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-565xb" event={"ID":"bfdc4edd-e91a-491a-944d-b2735bd61a2f","Type":"ContainerDied","Data":"0cf846cee88869c4c3103abc46d554e2e9615eed8327edb5717bdeaa10ddf5de"} Jan 22 14:14:59 crc kubenswrapper[5017]: I0122 14:14:58.936029 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-565xb" event={"ID":"bfdc4edd-e91a-491a-944d-b2735bd61a2f","Type":"ContainerStarted","Data":"b16daa3494dda940cbf283504e63909c9b1783cadaf2f77551f4484dfcb113e8"} Jan 22 14:14:59 crc kubenswrapper[5017]: I0122 14:14:58.938528 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j2qmt_c223e7fe-8360-4731-a32d-3cf60ed7324b/kube-multus/2.log" Jan 22 14:14:59 crc kubenswrapper[5017]: I0122 14:14:58.948973 5017 scope.go:117] "RemoveContainer" containerID="a82f1f421fb1dacb27a65981ee018a5c55f0d5966ead70963ee2435f43a14482" Jan 22 14:14:59 crc kubenswrapper[5017]: I0122 14:14:58.969003 5017 scope.go:117] "RemoveContainer" containerID="bbe9b9c90b955db9817dbc5d49e439d58f501d06e394ce200159873c5ada7b60" Jan 22 14:14:59 crc kubenswrapper[5017]: I0122 14:14:58.985651 5017 scope.go:117] "RemoveContainer" containerID="1b94e5ee3ffaf8cf3bc05fa3a5987cd1efaf8addad894f461d260df51be0df4c" Jan 22 14:14:59 crc kubenswrapper[5017]: I0122 14:14:59.011837 5017 scope.go:117] "RemoveContainer" containerID="2267f585f47582b70a4881d040fac6e645825ee937285f2915eb9171cdc43b75" Jan 22 14:14:59 crc kubenswrapper[5017]: I0122 14:14:59.024828 5017 scope.go:117] "RemoveContainer" 
containerID="ef4f55441c5b2b98b2f24fcee2fdc9391ab73437c0442b4b868b7f3c4c6df55d" Jan 22 14:14:59 crc kubenswrapper[5017]: I0122 14:14:59.041768 5017 scope.go:117] "RemoveContainer" containerID="fbb0aca22cad074713d9fa0ca40e5a95e6ada3bf6db81566fb5ac2efce48eac9" Jan 22 14:14:59 crc kubenswrapper[5017]: I0122 14:14:59.058025 5017 scope.go:117] "RemoveContainer" containerID="af75b4a3080660a953603e205c9c3d819887ce4d3bbc5feeca9181f98ec4be78" Jan 22 14:14:59 crc kubenswrapper[5017]: I0122 14:14:59.076265 5017 scope.go:117] "RemoveContainer" containerID="ac2598dc20d7acc7fabea05acad299d0fba6e8f0d66dcb898b109893ca7a9c10" Jan 22 14:14:59 crc kubenswrapper[5017]: I0122 14:14:59.291530 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-vgtf7"] Jan 22 14:14:59 crc kubenswrapper[5017]: I0122 14:14:59.295871 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-vgtf7"] Jan 22 14:14:59 crc kubenswrapper[5017]: I0122 14:14:59.947775 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-565xb" event={"ID":"bfdc4edd-e91a-491a-944d-b2735bd61a2f","Type":"ContainerStarted","Data":"6ca842953261fdfc496a96fd2bf32a439659ffc36e8071801eb8f49119132454"} Jan 22 14:15:00 crc kubenswrapper[5017]: I0122 14:15:00.170430 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k"] Jan 22 14:15:00 crc kubenswrapper[5017]: I0122 14:15:00.171666 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:00 crc kubenswrapper[5017]: I0122 14:15:00.174705 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 14:15:00 crc kubenswrapper[5017]: I0122 14:15:00.176924 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 14:15:00 crc kubenswrapper[5017]: I0122 14:15:00.227713 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ced57ded-4843-4a2f-9c43-862df6149a61-config-volume\") pod \"collect-profiles-29484855-2lp9k\" (UID: \"ced57ded-4843-4a2f-9c43-862df6149a61\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:00 crc kubenswrapper[5017]: I0122 14:15:00.227820 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ced57ded-4843-4a2f-9c43-862df6149a61-secret-volume\") pod \"collect-profiles-29484855-2lp9k\" (UID: \"ced57ded-4843-4a2f-9c43-862df6149a61\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:00 crc kubenswrapper[5017]: I0122 14:15:00.227858 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m772b\" (UniqueName: \"kubernetes.io/projected/ced57ded-4843-4a2f-9c43-862df6149a61-kube-api-access-m772b\") pod \"collect-profiles-29484855-2lp9k\" (UID: \"ced57ded-4843-4a2f-9c43-862df6149a61\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:00 crc kubenswrapper[5017]: I0122 14:15:00.329026 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-volume\" (UniqueName: \"kubernetes.io/configmap/ced57ded-4843-4a2f-9c43-862df6149a61-config-volume\") pod \"collect-profiles-29484855-2lp9k\" (UID: \"ced57ded-4843-4a2f-9c43-862df6149a61\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:00 crc kubenswrapper[5017]: I0122 14:15:00.329083 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ced57ded-4843-4a2f-9c43-862df6149a61-secret-volume\") pod \"collect-profiles-29484855-2lp9k\" (UID: \"ced57ded-4843-4a2f-9c43-862df6149a61\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:00 crc kubenswrapper[5017]: I0122 14:15:00.329128 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m772b\" (UniqueName: \"kubernetes.io/projected/ced57ded-4843-4a2f-9c43-862df6149a61-kube-api-access-m772b\") pod \"collect-profiles-29484855-2lp9k\" (UID: \"ced57ded-4843-4a2f-9c43-862df6149a61\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:00 crc kubenswrapper[5017]: I0122 14:15:00.330298 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ced57ded-4843-4a2f-9c43-862df6149a61-config-volume\") pod \"collect-profiles-29484855-2lp9k\" (UID: \"ced57ded-4843-4a2f-9c43-862df6149a61\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:00 crc kubenswrapper[5017]: I0122 14:15:00.352282 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ced57ded-4843-4a2f-9c43-862df6149a61-secret-volume\") pod \"collect-profiles-29484855-2lp9k\" (UID: \"ced57ded-4843-4a2f-9c43-862df6149a61\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:00 crc kubenswrapper[5017]: I0122 14:15:00.358514 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m772b\" (UniqueName: \"kubernetes.io/projected/ced57ded-4843-4a2f-9c43-862df6149a61-kube-api-access-m772b\") pod \"collect-profiles-29484855-2lp9k\" (UID: \"ced57ded-4843-4a2f-9c43-862df6149a61\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:00 crc kubenswrapper[5017]: I0122 14:15:00.496239 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:00 crc kubenswrapper[5017]: E0122 14:15:00.532339 5017 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29484855-2lp9k_openshift-operator-lifecycle-manager_ced57ded-4843-4a2f-9c43-862df6149a61_0(7ecede4fd4ceb03c1caa5280006f5bb7b098acf9df573916023ee0b2153db1bc): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 22 14:15:00 crc kubenswrapper[5017]: E0122 14:15:00.532450 5017 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29484855-2lp9k_openshift-operator-lifecycle-manager_ced57ded-4843-4a2f-9c43-862df6149a61_0(7ecede4fd4ceb03c1caa5280006f5bb7b098acf9df573916023ee0b2153db1bc): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:00 crc kubenswrapper[5017]: E0122 14:15:00.532486 5017 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29484855-2lp9k_openshift-operator-lifecycle-manager_ced57ded-4843-4a2f-9c43-862df6149a61_0(7ecede4fd4ceb03c1caa5280006f5bb7b098acf9df573916023ee0b2153db1bc): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:00 crc kubenswrapper[5017]: E0122 14:15:00.532565 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"collect-profiles-29484855-2lp9k_openshift-operator-lifecycle-manager(ced57ded-4843-4a2f-9c43-862df6149a61)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"collect-profiles-29484855-2lp9k_openshift-operator-lifecycle-manager(ced57ded-4843-4a2f-9c43-862df6149a61)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29484855-2lp9k_openshift-operator-lifecycle-manager_ced57ded-4843-4a2f-9c43-862df6149a61_0(7ecede4fd4ceb03c1caa5280006f5bb7b098acf9df573916023ee0b2153db1bc): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" podUID="ced57ded-4843-4a2f-9c43-862df6149a61" Jan 22 14:15:00 crc kubenswrapper[5017]: I0122 14:15:00.960273 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-565xb" event={"ID":"bfdc4edd-e91a-491a-944d-b2735bd61a2f","Type":"ContainerStarted","Data":"5a51e85c5b8320b9e41d0580ab8a0ac0847df38840dc4f5c01152ab8f1052d23"} Jan 22 14:15:01 crc kubenswrapper[5017]: I0122 14:15:01.268996 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43c64840-5206-4dec-834e-77e0562f19de" path="/var/lib/kubelet/pods/43c64840-5206-4dec-834e-77e0562f19de/volumes" Jan 22 14:15:01 crc kubenswrapper[5017]: I0122 14:15:01.970572 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-565xb" event={"ID":"bfdc4edd-e91a-491a-944d-b2735bd61a2f","Type":"ContainerStarted","Data":"7c9cbe3360408f53393b52a6726c7a8488d489cab73146a3d193fffbd4d4b3c3"} Jan 22 14:15:01 crc kubenswrapper[5017]: I0122 14:15:01.970645 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-565xb" event={"ID":"bfdc4edd-e91a-491a-944d-b2735bd61a2f","Type":"ContainerStarted","Data":"72bb14f2da9e9207e8249811aff4ccb5228b293fedf66b1fdd5c97dbf7ba7358"} Jan 22 14:15:02 crc kubenswrapper[5017]: I0122 14:15:02.979521 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-565xb" event={"ID":"bfdc4edd-e91a-491a-944d-b2735bd61a2f","Type":"ContainerStarted","Data":"5396ede8410a4c5ff977a74c82329c2d08f97cd170ba3ed807e760880898b828"} Jan 22 14:15:02 crc kubenswrapper[5017]: I0122 14:15:02.980004 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-565xb" event={"ID":"bfdc4edd-e91a-491a-944d-b2735bd61a2f","Type":"ContainerStarted","Data":"b6647eaa5143ce97c4c494785e3e2bf04d29b2f5a0513d10c0ad3f8a0f2b1a38"} Jan 22 14:15:04 crc kubenswrapper[5017]: I0122 14:15:04.998326 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-565xb" event={"ID":"bfdc4edd-e91a-491a-944d-b2735bd61a2f","Type":"ContainerStarted","Data":"e8e793b5e224c1d93d0f16064eb40eebe1691e656de4c8e159355cd05ea9b7fb"} Jan 22 14:15:07 crc kubenswrapper[5017]: I0122 14:15:07.019096 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-565xb" event={"ID":"bfdc4edd-e91a-491a-944d-b2735bd61a2f","Type":"ContainerStarted","Data":"6d18b9584842790fc4a9b8ea4bc12f4f03b4f8b947314b071efdae9cec82fa3c"} Jan 22 14:15:07 crc kubenswrapper[5017]: I0122 14:15:07.020296 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:15:07 crc kubenswrapper[5017]: I0122 14:15:07.020467 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:15:07 crc kubenswrapper[5017]: I0122 14:15:07.020585 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:15:07 crc kubenswrapper[5017]: I0122 14:15:07.053920 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:15:07 crc kubenswrapper[5017]: I0122 14:15:07.054099 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:15:07 crc kubenswrapper[5017]: I0122 14:15:07.070701 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-565xb" podStartSLOduration=10.070663192 podStartE2EDuration="10.070663192s" podCreationTimestamp="2026-01-22 14:14:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:15:07.058473238 +0000 UTC m=+722.050935106" watchObservedRunningTime="2026-01-22 14:15:07.070663192 +0000 UTC m=+722.063125050" Jan 22 14:15:07 crc kubenswrapper[5017]: I0122 14:15:07.828436 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k"] Jan 22 14:15:07 crc kubenswrapper[5017]: I0122 14:15:07.828636 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:07 crc kubenswrapper[5017]: I0122 14:15:07.829275 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:07 crc kubenswrapper[5017]: E0122 14:15:07.863388 5017 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29484855-2lp9k_openshift-operator-lifecycle-manager_ced57ded-4843-4a2f-9c43-862df6149a61_0(524ed108da2f0f6a588e2b4b06f0c3d30ad101a1efad92e7c85edf21b7c3698a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 22 14:15:07 crc kubenswrapper[5017]: E0122 14:15:07.863498 5017 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29484855-2lp9k_openshift-operator-lifecycle-manager_ced57ded-4843-4a2f-9c43-862df6149a61_0(524ed108da2f0f6a588e2b4b06f0c3d30ad101a1efad92e7c85edf21b7c3698a): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:07 crc kubenswrapper[5017]: E0122 14:15:07.863536 5017 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29484855-2lp9k_openshift-operator-lifecycle-manager_ced57ded-4843-4a2f-9c43-862df6149a61_0(524ed108da2f0f6a588e2b4b06f0c3d30ad101a1efad92e7c85edf21b7c3698a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:07 crc kubenswrapper[5017]: E0122 14:15:07.863623 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"collect-profiles-29484855-2lp9k_openshift-operator-lifecycle-manager(ced57ded-4843-4a2f-9c43-862df6149a61)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"collect-profiles-29484855-2lp9k_openshift-operator-lifecycle-manager(ced57ded-4843-4a2f-9c43-862df6149a61)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29484855-2lp9k_openshift-operator-lifecycle-manager_ced57ded-4843-4a2f-9c43-862df6149a61_0(524ed108da2f0f6a588e2b4b06f0c3d30ad101a1efad92e7c85edf21b7c3698a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" podUID="ced57ded-4843-4a2f-9c43-862df6149a61" Jan 22 14:15:11 crc kubenswrapper[5017]: I0122 14:15:11.258022 5017 scope.go:117] "RemoveContainer" containerID="d04e65bff2218b2db1e8def603023e29a162b0a2a77092623bd04d9e54430afd" Jan 22 14:15:11 crc kubenswrapper[5017]: E0122 14:15:11.259193 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-j2qmt_openshift-multus(c223e7fe-8360-4731-a32d-3cf60ed7324b)\"" pod="openshift-multus/multus-j2qmt" podUID="c223e7fe-8360-4731-a32d-3cf60ed7324b" Jan 22 14:15:21 crc kubenswrapper[5017]: I0122 14:15:21.258536 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:21 crc kubenswrapper[5017]: I0122 14:15:21.259723 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:21 crc kubenswrapper[5017]: E0122 14:15:21.306246 5017 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29484855-2lp9k_openshift-operator-lifecycle-manager_ced57ded-4843-4a2f-9c43-862df6149a61_0(66611ddd068b675ffb5c165720ca5cf5cd6fe6a8c9b832728e1288285767b2c3): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 22 14:15:21 crc kubenswrapper[5017]: E0122 14:15:21.306933 5017 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29484855-2lp9k_openshift-operator-lifecycle-manager_ced57ded-4843-4a2f-9c43-862df6149a61_0(66611ddd068b675ffb5c165720ca5cf5cd6fe6a8c9b832728e1288285767b2c3): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:21 crc kubenswrapper[5017]: E0122 14:15:21.306998 5017 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29484855-2lp9k_openshift-operator-lifecycle-manager_ced57ded-4843-4a2f-9c43-862df6149a61_0(66611ddd068b675ffb5c165720ca5cf5cd6fe6a8c9b832728e1288285767b2c3): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:21 crc kubenswrapper[5017]: E0122 14:15:21.307103 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"collect-profiles-29484855-2lp9k_openshift-operator-lifecycle-manager(ced57ded-4843-4a2f-9c43-862df6149a61)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"collect-profiles-29484855-2lp9k_openshift-operator-lifecycle-manager(ced57ded-4843-4a2f-9c43-862df6149a61)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29484855-2lp9k_openshift-operator-lifecycle-manager_ced57ded-4843-4a2f-9c43-862df6149a61_0(66611ddd068b675ffb5c165720ca5cf5cd6fe6a8c9b832728e1288285767b2c3): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" podUID="ced57ded-4843-4a2f-9c43-862df6149a61" Jan 22 14:15:23 crc kubenswrapper[5017]: I0122 14:15:23.152008 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph"] Jan 22 14:15:23 crc kubenswrapper[5017]: I0122 14:15:23.153543 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph" Jan 22 14:15:23 crc kubenswrapper[5017]: I0122 14:15:23.156274 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Jan 22 14:15:23 crc kubenswrapper[5017]: I0122 14:15:23.156680 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Jan 22 14:15:23 crc kubenswrapper[5017]: I0122 14:15:23.156925 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-wwpj7" Jan 22 14:15:23 crc kubenswrapper[5017]: I0122 14:15:23.242508 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log\" (UniqueName: \"kubernetes.io/empty-dir/401cb975-04a3-4410-9163-2f691a919b6d-log\") pod \"ceph\" (UID: \"401cb975-04a3-4410-9163-2f691a919b6d\") " pod="openstack/ceph" Jan 22 14:15:23 crc kubenswrapper[5017]: I0122 14:15:23.242938 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/empty-dir/401cb975-04a3-4410-9163-2f691a919b6d-run\") pod \"ceph\" (UID: \"401cb975-04a3-4410-9163-2f691a919b6d\") " pod="openstack/ceph" Jan 22 14:15:23 crc kubenswrapper[5017]: I0122 14:15:23.243219 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data\" (UniqueName: \"kubernetes.io/empty-dir/401cb975-04a3-4410-9163-2f691a919b6d-data\") pod \"ceph\" (UID: \"401cb975-04a3-4410-9163-2f691a919b6d\") " pod="openstack/ceph" Jan 22 14:15:23 crc kubenswrapper[5017]: I0122 14:15:23.243378 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wv5mz\" (UniqueName: \"kubernetes.io/projected/401cb975-04a3-4410-9163-2f691a919b6d-kube-api-access-wv5mz\") pod \"ceph\" (UID: \"401cb975-04a3-4410-9163-2f691a919b6d\") " pod="openstack/ceph" Jan 22 14:15:23 crc kubenswrapper[5017]: I0122 14:15:23.344162 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data\" (UniqueName: \"kubernetes.io/empty-dir/401cb975-04a3-4410-9163-2f691a919b6d-data\") pod \"ceph\" (UID: \"401cb975-04a3-4410-9163-2f691a919b6d\") " pod="openstack/ceph" Jan 22 14:15:23 crc kubenswrapper[5017]: I0122 14:15:23.344255 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wv5mz\" (UniqueName: \"kubernetes.io/projected/401cb975-04a3-4410-9163-2f691a919b6d-kube-api-access-wv5mz\") pod \"ceph\" (UID: \"401cb975-04a3-4410-9163-2f691a919b6d\") " pod="openstack/ceph" Jan 22 14:15:23 crc kubenswrapper[5017]: I0122 14:15:23.344331 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log\" (UniqueName: \"kubernetes.io/empty-dir/401cb975-04a3-4410-9163-2f691a919b6d-log\") pod \"ceph\" (UID: \"401cb975-04a3-4410-9163-2f691a919b6d\") " pod="openstack/ceph" Jan 22 14:15:23 crc kubenswrapper[5017]: I0122 14:15:23.344369 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/empty-dir/401cb975-04a3-4410-9163-2f691a919b6d-run\") pod \"ceph\" (UID: \"401cb975-04a3-4410-9163-2f691a919b6d\") " pod="openstack/ceph" Jan 22 14:15:23 crc kubenswrapper[5017]: I0122 14:15:23.344999 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log\" (UniqueName: \"kubernetes.io/empty-dir/401cb975-04a3-4410-9163-2f691a919b6d-log\") pod \"ceph\" (UID: \"401cb975-04a3-4410-9163-2f691a919b6d\") " 
pod="openstack/ceph" Jan 22 14:15:23 crc kubenswrapper[5017]: I0122 14:15:23.345183 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/empty-dir/401cb975-04a3-4410-9163-2f691a919b6d-run\") pod \"ceph\" (UID: \"401cb975-04a3-4410-9163-2f691a919b6d\") " pod="openstack/ceph" Jan 22 14:15:23 crc kubenswrapper[5017]: I0122 14:15:23.346071 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data\" (UniqueName: \"kubernetes.io/empty-dir/401cb975-04a3-4410-9163-2f691a919b6d-data\") pod \"ceph\" (UID: \"401cb975-04a3-4410-9163-2f691a919b6d\") " pod="openstack/ceph" Jan 22 14:15:23 crc kubenswrapper[5017]: I0122 14:15:23.374472 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wv5mz\" (UniqueName: \"kubernetes.io/projected/401cb975-04a3-4410-9163-2f691a919b6d-kube-api-access-wv5mz\") pod \"ceph\" (UID: \"401cb975-04a3-4410-9163-2f691a919b6d\") " pod="openstack/ceph" Jan 22 14:15:23 crc kubenswrapper[5017]: I0122 14:15:23.495041 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph" Jan 22 14:15:23 crc kubenswrapper[5017]: W0122 14:15:23.522334 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod401cb975_04a3_4410_9163_2f691a919b6d.slice/crio-7096a8e7b1a09d4ba9e7284c4f4c9cdaaf675c97fca2a101d3c26e7fd294e8c8 WatchSource:0}: Error finding container 7096a8e7b1a09d4ba9e7284c4f4c9cdaaf675c97fca2a101d3c26e7fd294e8c8: Status 404 returned error can't find the container with id 7096a8e7b1a09d4ba9e7284c4f4c9cdaaf675c97fca2a101d3c26e7fd294e8c8 Jan 22 14:15:23 crc kubenswrapper[5017]: E0122 14:15:23.654018 5017 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=1978005060822482527, SKID=, AKID=35:50:B5:6C:22:59:00:DF:58:0B:4B:D4:E1:2E:FD:76:AB:41:27:C2 failed: x509: certificate signed by unknown authority" Jan 22 14:15:23 crc kubenswrapper[5017]: E0122 14:15:23.670679 5017 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=1978005060822482527, SKID=, AKID=35:50:B5:6C:22:59:00:DF:58:0B:4B:D4:E1:2E:FD:76:AB:41:27:C2 failed: x509: certificate signed by unknown authority" Jan 22 14:15:24 crc kubenswrapper[5017]: I0122 14:15:24.136806 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph" event={"ID":"401cb975-04a3-4410-9163-2f691a919b6d","Type":"ContainerStarted","Data":"7096a8e7b1a09d4ba9e7284c4f4c9cdaaf675c97fca2a101d3c26e7fd294e8c8"} Jan 22 14:15:24 crc kubenswrapper[5017]: I0122 14:15:24.258051 5017 scope.go:117] "RemoveContainer" containerID="d04e65bff2218b2db1e8def603023e29a162b0a2a77092623bd04d9e54430afd" Jan 22 14:15:24 crc kubenswrapper[5017]: E0122 14:15:24.836989 5017 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=1978005060822482527, SKID=, AKID=35:50:B5:6C:22:59:00:DF:58:0B:4B:D4:E1:2E:FD:76:AB:41:27:C2 failed: x509: certificate signed by unknown authority" Jan 22 14:15:24 crc kubenswrapper[5017]: E0122 14:15:24.859891 5017 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=1978005060822482527, SKID=, AKID=35:50:B5:6C:22:59:00:DF:58:0B:4B:D4:E1:2E:FD:76:AB:41:27:C2 failed: x509: certificate signed by unknown authority" Jan 22 14:15:26 crc kubenswrapper[5017]: E0122 14:15:26.028424 5017 server.go:309] "Unable to authenticate the request due to an error" 
err="verifying certificate SN=1978005060822482527, SKID=, AKID=35:50:B5:6C:22:59:00:DF:58:0B:4B:D4:E1:2E:FD:76:AB:41:27:C2 failed: x509: certificate signed by unknown authority" Jan 22 14:15:26 crc kubenswrapper[5017]: E0122 14:15:26.041160 5017 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=1978005060822482527, SKID=, AKID=35:50:B5:6C:22:59:00:DF:58:0B:4B:D4:E1:2E:FD:76:AB:41:27:C2 failed: x509: certificate signed by unknown authority" Jan 22 14:15:26 crc kubenswrapper[5017]: I0122 14:15:26.157807 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j2qmt_c223e7fe-8360-4731-a32d-3cf60ed7324b/kube-multus/2.log" Jan 22 14:15:26 crc kubenswrapper[5017]: I0122 14:15:26.157907 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j2qmt" event={"ID":"c223e7fe-8360-4731-a32d-3cf60ed7324b","Type":"ContainerStarted","Data":"640ad5cdb9fcfb2c43e7d933af584b0a4c0163aaedeabb9b8fd73c18a9abf3d8"} Jan 22 14:15:27 crc kubenswrapper[5017]: E0122 14:15:27.232675 5017 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=1978005060822482527, SKID=, AKID=35:50:B5:6C:22:59:00:DF:58:0B:4B:D4:E1:2E:FD:76:AB:41:27:C2 failed: x509: certificate signed by unknown authority" Jan 22 14:15:27 crc kubenswrapper[5017]: E0122 14:15:27.250421 5017 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=1978005060822482527, SKID=, AKID=35:50:B5:6C:22:59:00:DF:58:0B:4B:D4:E1:2E:FD:76:AB:41:27:C2 failed: x509: certificate signed by unknown authority" Jan 22 14:15:28 crc kubenswrapper[5017]: I0122 14:15:28.348034 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-565xb" Jan 22 14:15:28 crc kubenswrapper[5017]: E0122 14:15:28.419930 5017 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=1978005060822482527, SKID=, AKID=35:50:B5:6C:22:59:00:DF:58:0B:4B:D4:E1:2E:FD:76:AB:41:27:C2 failed: x509: certificate signed by unknown authority" Jan 22 14:15:28 crc kubenswrapper[5017]: E0122 14:15:28.443083 5017 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=1978005060822482527, SKID=, AKID=35:50:B5:6C:22:59:00:DF:58:0B:4B:D4:E1:2E:FD:76:AB:41:27:C2 failed: x509: certificate signed by unknown authority" Jan 22 14:15:29 crc kubenswrapper[5017]: E0122 14:15:29.586826 5017 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=1978005060822482527, SKID=, AKID=35:50:B5:6C:22:59:00:DF:58:0B:4B:D4:E1:2E:FD:76:AB:41:27:C2 failed: x509: certificate signed by unknown authority" Jan 22 14:15:29 crc kubenswrapper[5017]: E0122 14:15:29.602889 5017 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=1978005060822482527, SKID=, AKID=35:50:B5:6C:22:59:00:DF:58:0B:4B:D4:E1:2E:FD:76:AB:41:27:C2 failed: x509: certificate signed by unknown authority" Jan 22 14:15:30 crc kubenswrapper[5017]: E0122 14:15:30.805542 5017 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=1978005060822482527, SKID=, AKID=35:50:B5:6C:22:59:00:DF:58:0B:4B:D4:E1:2E:FD:76:AB:41:27:C2 failed: x509: certificate signed by unknown authority" Jan 22 14:15:30 crc kubenswrapper[5017]: E0122 14:15:30.822699 5017 server.go:309] "Unable to authenticate the request due to an error" err="verifying 
certificate SN=1978005060822482527, SKID=, AKID=35:50:B5:6C:22:59:00:DF:58:0B:4B:D4:E1:2E:FD:76:AB:41:27:C2 failed: x509: certificate signed by unknown authority" Jan 22 14:15:32 crc kubenswrapper[5017]: E0122 14:15:32.032266 5017 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=1978005060822482527, SKID=, AKID=35:50:B5:6C:22:59:00:DF:58:0B:4B:D4:E1:2E:FD:76:AB:41:27:C2 failed: x509: certificate signed by unknown authority" Jan 22 14:15:32 crc kubenswrapper[5017]: E0122 14:15:32.053039 5017 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=1978005060822482527, SKID=, AKID=35:50:B5:6C:22:59:00:DF:58:0B:4B:D4:E1:2E:FD:76:AB:41:27:C2 failed: x509: certificate signed by unknown authority" Jan 22 14:15:33 crc kubenswrapper[5017]: E0122 14:15:33.221490 5017 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=1978005060822482527, SKID=, AKID=35:50:B5:6C:22:59:00:DF:58:0B:4B:D4:E1:2E:FD:76:AB:41:27:C2 failed: x509: certificate signed by unknown authority" Jan 22 14:15:33 crc kubenswrapper[5017]: E0122 14:15:33.238775 5017 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=1978005060822482527, SKID=, AKID=35:50:B5:6C:22:59:00:DF:58:0B:4B:D4:E1:2E:FD:76:AB:41:27:C2 failed: x509: certificate signed by unknown authority" Jan 22 14:15:34 crc kubenswrapper[5017]: E0122 14:15:34.443627 5017 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=1978005060822482527, SKID=, AKID=35:50:B5:6C:22:59:00:DF:58:0B:4B:D4:E1:2E:FD:76:AB:41:27:C2 failed: x509: certificate signed by unknown authority" Jan 22 14:15:34 crc kubenswrapper[5017]: E0122 14:15:34.459856 5017 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=1978005060822482527, SKID=, AKID=35:50:B5:6C:22:59:00:DF:58:0B:4B:D4:E1:2E:FD:76:AB:41:27:C2 failed: x509: certificate signed by unknown authority" Jan 22 14:15:35 crc kubenswrapper[5017]: I0122 14:15:35.266460 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:35 crc kubenswrapper[5017]: I0122 14:15:35.266899 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:35 crc kubenswrapper[5017]: I0122 14:15:35.409882 5017 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 22 14:15:54 crc kubenswrapper[5017]: E0122 14:15:54.034006 5017 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/ceph/demo:latest-squid" Jan 22 14:15:54 crc kubenswrapper[5017]: E0122 14:15:54.035124 5017 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceph,Image:quay.io/ceph/demo:latest-squid,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:MON_IP,Value:192.168.126.11,ValueFrom:nil,},EnvVar{Name:CEPH_DAEMON,Value:demo,ValueFrom:nil,},EnvVar{Name:CEPH_PUBLIC_NETWORK,Value:0.0.0.0/0,ValueFrom:nil,},EnvVar{Name:DEMO_DAEMONS,Value:osd,mds,rgw,ValueFrom:nil,},EnvVar{Name:CEPH_DEMO_UID,Value:0,ValueFrom:nil,},EnvVar{Name:RGW_NAME,Value:ceph,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:data,ReadOnly:false,MountPath:/var/lib/ceph,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:log,ReadOnly:false,MountPath:/var/log/ceph,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:run,ReadOnly:false,MountPath:/run/ceph,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wv5mz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:nil,Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceph_openstack(401cb975-04a3-4410-9163-2f691a919b6d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 14:15:54 crc kubenswrapper[5017]: E0122 14:15:54.036298 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceph\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceph" podUID="401cb975-04a3-4410-9163-2f691a919b6d" Jan 22 14:15:54 crc kubenswrapper[5017]: I0122 14:15:54.149338 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k"] Jan 22 14:15:54 crc kubenswrapper[5017]: I0122 14:15:54.514846 5017 generic.go:334] "Generic (PLEG): container finished" podID="ced57ded-4843-4a2f-9c43-862df6149a61" containerID="062eb4ec67a4b7a70315dafa503beb7ef6edc9a608020973e9ad631261181752" exitCode=0 Jan 22 14:15:54 crc kubenswrapper[5017]: I0122 14:15:54.514958 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" event={"ID":"ced57ded-4843-4a2f-9c43-862df6149a61","Type":"ContainerDied","Data":"062eb4ec67a4b7a70315dafa503beb7ef6edc9a608020973e9ad631261181752"} Jan 22 14:15:54 crc kubenswrapper[5017]: I0122 14:15:54.515395 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" event={"ID":"ced57ded-4843-4a2f-9c43-862df6149a61","Type":"ContainerStarted","Data":"1f7f37d0a8b9b517452fcb89a15ca9278798cc2ff4e3dd6b408b2604271ee276"} Jan 22 14:15:54 crc kubenswrapper[5017]: E0122 14:15:54.517635 5017 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceph\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/ceph/demo:latest-squid\\\"\"" pod="openstack/ceph" podUID="401cb975-04a3-4410-9163-2f691a919b6d" Jan 22 14:15:54 crc kubenswrapper[5017]: E0122 14:15:54.517654 5017 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podced57ded_4843_4a2f_9c43_862df6149a61.slice/crio-062eb4ec67a4b7a70315dafa503beb7ef6edc9a608020973e9ad631261181752.scope\": RecentStats: unable to find data in memory cache]" Jan 22 14:15:55 crc kubenswrapper[5017]: I0122 14:15:55.885980 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:15:55 crc kubenswrapper[5017]: I0122 14:15:55.964332 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m772b\" (UniqueName: \"kubernetes.io/projected/ced57ded-4843-4a2f-9c43-862df6149a61-kube-api-access-m772b\") pod \"ced57ded-4843-4a2f-9c43-862df6149a61\" (UID: \"ced57ded-4843-4a2f-9c43-862df6149a61\") " Jan 22 14:15:55 crc kubenswrapper[5017]: I0122 14:15:55.964410 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ced57ded-4843-4a2f-9c43-862df6149a61-config-volume\") pod \"ced57ded-4843-4a2f-9c43-862df6149a61\" (UID: \"ced57ded-4843-4a2f-9c43-862df6149a61\") " Jan 22 14:15:55 crc kubenswrapper[5017]: I0122 14:15:55.964518 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ced57ded-4843-4a2f-9c43-862df6149a61-secret-volume\") pod \"ced57ded-4843-4a2f-9c43-862df6149a61\" (UID: \"ced57ded-4843-4a2f-9c43-862df6149a61\") " Jan 22 14:15:55 crc kubenswrapper[5017]: I0122 14:15:55.965356 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ced57ded-4843-4a2f-9c43-862df6149a61-config-volume" (OuterVolumeSpecName: "config-volume") pod "ced57ded-4843-4a2f-9c43-862df6149a61" (UID: "ced57ded-4843-4a2f-9c43-862df6149a61"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:15:55 crc kubenswrapper[5017]: I0122 14:15:55.965888 5017 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ced57ded-4843-4a2f-9c43-862df6149a61-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 14:15:55 crc kubenswrapper[5017]: I0122 14:15:55.971991 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ced57ded-4843-4a2f-9c43-862df6149a61-kube-api-access-m772b" (OuterVolumeSpecName: "kube-api-access-m772b") pod "ced57ded-4843-4a2f-9c43-862df6149a61" (UID: "ced57ded-4843-4a2f-9c43-862df6149a61"). InnerVolumeSpecName "kube-api-access-m772b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:15:55 crc kubenswrapper[5017]: I0122 14:15:55.972128 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ced57ded-4843-4a2f-9c43-862df6149a61-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ced57ded-4843-4a2f-9c43-862df6149a61" (UID: "ced57ded-4843-4a2f-9c43-862df6149a61"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:15:56 crc kubenswrapper[5017]: I0122 14:15:56.067041 5017 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ced57ded-4843-4a2f-9c43-862df6149a61-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 14:15:56 crc kubenswrapper[5017]: I0122 14:15:56.067123 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m772b\" (UniqueName: \"kubernetes.io/projected/ced57ded-4843-4a2f-9c43-862df6149a61-kube-api-access-m772b\") on node \"crc\" DevicePath \"\"" Jan 22 14:15:56 crc kubenswrapper[5017]: I0122 14:15:56.536752 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" event={"ID":"ced57ded-4843-4a2f-9c43-862df6149a61","Type":"ContainerDied","Data":"1f7f37d0a8b9b517452fcb89a15ca9278798cc2ff4e3dd6b408b2604271ee276"} Jan 22 14:15:56 crc kubenswrapper[5017]: I0122 14:15:56.537340 5017 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1f7f37d0a8b9b517452fcb89a15ca9278798cc2ff4e3dd6b408b2604271ee276" Jan 22 14:15:56 crc kubenswrapper[5017]: I0122 14:15:56.536862 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-2lp9k" Jan 22 14:16:12 crc kubenswrapper[5017]: I0122 14:16:12.674572 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph" event={"ID":"401cb975-04a3-4410-9163-2f691a919b6d","Type":"ContainerStarted","Data":"cc266abc76a5c185960ce4cbbe53a8227b4382ba46def45d29e2ace6aaa8f873"} Jan 22 14:16:12 crc kubenswrapper[5017]: I0122 14:16:12.696619 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph" podStartSLOduration=1.194853292 podStartE2EDuration="49.696597308s" podCreationTimestamp="2026-01-22 14:15:23 +0000 UTC" firstStartedPulling="2026-01-22 14:15:23.525018058 +0000 UTC m=+738.517479936" lastFinishedPulling="2026-01-22 14:16:12.026762054 +0000 UTC m=+787.019223952" observedRunningTime="2026-01-22 14:16:12.693377598 +0000 UTC m=+787.685839456" watchObservedRunningTime="2026-01-22 14:16:12.696597308 +0000 UTC m=+787.689059166" Jan 22 14:16:22 crc kubenswrapper[5017]: I0122 14:16:22.884850 5017 patch_prober.go:28] interesting pod/machine-config-daemon-cr62h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:16:22 crc kubenswrapper[5017]: I0122 14:16:22.886216 5017 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:16:26 crc kubenswrapper[5017]: E0122 14:16:26.453461 5017 upgradeaware.go:427] Error proxying 
data from client to backend: readfrom tcp 38.129.56.235:50732->38.129.56.235:35147: write tcp 38.129.56.235:50732->38.129.56.235:35147: write: broken pipe Jan 22 14:16:32 crc kubenswrapper[5017]: E0122 14:16:32.511578 5017 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.129.56.235:42050->38.129.56.235:35147: write tcp 38.129.56.235:42050->38.129.56.235:35147: write: broken pipe Jan 22 14:16:52 crc kubenswrapper[5017]: I0122 14:16:52.884484 5017 patch_prober.go:28] interesting pod/machine-config-daemon-cr62h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:16:52 crc kubenswrapper[5017]: I0122 14:16:52.885439 5017 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:17:10 crc kubenswrapper[5017]: E0122 14:17:10.777339 5017 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.129.56.235:54566->38.129.56.235:35147: write tcp 38.129.56.235:54566->38.129.56.235:35147: write: broken pipe Jan 22 14:17:22 crc kubenswrapper[5017]: I0122 14:17:22.883441 5017 patch_prober.go:28] interesting pod/machine-config-daemon-cr62h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:17:22 crc kubenswrapper[5017]: I0122 14:17:22.884445 5017 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:17:22 crc kubenswrapper[5017]: I0122 14:17:22.884522 5017 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" Jan 22 14:17:22 crc kubenswrapper[5017]: I0122 14:17:22.885517 5017 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d5ba9ef9161473c8ce388bf5a9a68f3a95a5b876f5893ae82a825172bb9859d5"} pod="openshift-machine-config-operator/machine-config-daemon-cr62h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 14:17:22 crc kubenswrapper[5017]: I0122 14:17:22.885595 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" containerID="cri-o://d5ba9ef9161473c8ce388bf5a9a68f3a95a5b876f5893ae82a825172bb9859d5" gracePeriod=600 Jan 22 14:17:23 crc kubenswrapper[5017]: I0122 14:17:23.164867 5017 generic.go:334] "Generic (PLEG): container finished" podID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerID="d5ba9ef9161473c8ce388bf5a9a68f3a95a5b876f5893ae82a825172bb9859d5" exitCode=0 Jan 22 14:17:23 crc kubenswrapper[5017]: I0122 14:17:23.165089 5017 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" event={"ID":"3f43f877-4d1f-4041-af1f-39a962f7ac0d","Type":"ContainerDied","Data":"d5ba9ef9161473c8ce388bf5a9a68f3a95a5b876f5893ae82a825172bb9859d5"} Jan 22 14:17:23 crc kubenswrapper[5017]: I0122 14:17:23.165337 5017 scope.go:117] "RemoveContainer" containerID="282d7fb498351da0e6f85f32143410226094674654291915dfeb3a542bab8422" Jan 22 14:17:24 crc kubenswrapper[5017]: I0122 14:17:24.175318 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" event={"ID":"3f43f877-4d1f-4041-af1f-39a962f7ac0d","Type":"ContainerStarted","Data":"1d27c3a5c58b1e6c68ef017f8d5adf66a05dff18c5c08e0c60bb0b5a99e32c2a"} Jan 22 14:17:35 crc kubenswrapper[5017]: I0122 14:17:35.251144 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh"] Jan 22 14:17:35 crc kubenswrapper[5017]: E0122 14:17:35.252334 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ced57ded-4843-4a2f-9c43-862df6149a61" containerName="collect-profiles" Jan 22 14:17:35 crc kubenswrapper[5017]: I0122 14:17:35.252351 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="ced57ded-4843-4a2f-9c43-862df6149a61" containerName="collect-profiles" Jan 22 14:17:35 crc kubenswrapper[5017]: I0122 14:17:35.252684 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="ced57ded-4843-4a2f-9c43-862df6149a61" containerName="collect-profiles" Jan 22 14:17:35 crc kubenswrapper[5017]: I0122 14:17:35.253807 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh" Jan 22 14:17:35 crc kubenswrapper[5017]: I0122 14:17:35.256192 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 22 14:17:35 crc kubenswrapper[5017]: I0122 14:17:35.266310 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh"] Jan 22 14:17:35 crc kubenswrapper[5017]: I0122 14:17:35.296306 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/870fa842-bac0-4b1f-9cc1-ababf261a783-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh\" (UID: \"870fa842-bac0-4b1f-9cc1-ababf261a783\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh" Jan 22 14:17:35 crc kubenswrapper[5017]: I0122 14:17:35.296410 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/870fa842-bac0-4b1f-9cc1-ababf261a783-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh\" (UID: \"870fa842-bac0-4b1f-9cc1-ababf261a783\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh" Jan 22 14:17:35 crc kubenswrapper[5017]: I0122 14:17:35.296475 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdbx7\" (UniqueName: \"kubernetes.io/projected/870fa842-bac0-4b1f-9cc1-ababf261a783-kube-api-access-kdbx7\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh\" (UID: \"870fa842-bac0-4b1f-9cc1-ababf261a783\") " 
pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh" Jan 22 14:17:35 crc kubenswrapper[5017]: I0122 14:17:35.397731 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/870fa842-bac0-4b1f-9cc1-ababf261a783-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh\" (UID: \"870fa842-bac0-4b1f-9cc1-ababf261a783\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh" Jan 22 14:17:35 crc kubenswrapper[5017]: I0122 14:17:35.397800 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/870fa842-bac0-4b1f-9cc1-ababf261a783-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh\" (UID: \"870fa842-bac0-4b1f-9cc1-ababf261a783\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh" Jan 22 14:17:35 crc kubenswrapper[5017]: I0122 14:17:35.398523 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdbx7\" (UniqueName: \"kubernetes.io/projected/870fa842-bac0-4b1f-9cc1-ababf261a783-kube-api-access-kdbx7\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh\" (UID: \"870fa842-bac0-4b1f-9cc1-ababf261a783\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh" Jan 22 14:17:35 crc kubenswrapper[5017]: I0122 14:17:35.398613 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/870fa842-bac0-4b1f-9cc1-ababf261a783-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh\" (UID: \"870fa842-bac0-4b1f-9cc1-ababf261a783\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh" Jan 22 14:17:35 crc kubenswrapper[5017]: I0122 14:17:35.398453 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/870fa842-bac0-4b1f-9cc1-ababf261a783-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh\" (UID: \"870fa842-bac0-4b1f-9cc1-ababf261a783\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh" Jan 22 14:17:35 crc kubenswrapper[5017]: I0122 14:17:35.431259 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdbx7\" (UniqueName: \"kubernetes.io/projected/870fa842-bac0-4b1f-9cc1-ababf261a783-kube-api-access-kdbx7\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh\" (UID: \"870fa842-bac0-4b1f-9cc1-ababf261a783\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh" Jan 22 14:17:35 crc kubenswrapper[5017]: I0122 14:17:35.577265 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh" Jan 22 14:17:35 crc kubenswrapper[5017]: I0122 14:17:35.930971 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh"] Jan 22 14:17:35 crc kubenswrapper[5017]: W0122 14:17:35.938486 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod870fa842_bac0_4b1f_9cc1_ababf261a783.slice/crio-2fba0f0fae12619a0b691de5cea0efe2b71765018617e80ea7346998203f305f WatchSource:0}: Error finding container 2fba0f0fae12619a0b691de5cea0efe2b71765018617e80ea7346998203f305f: Status 404 returned error can't find the container with id 2fba0f0fae12619a0b691de5cea0efe2b71765018617e80ea7346998203f305f Jan 22 14:17:36 crc kubenswrapper[5017]: I0122 14:17:36.260515 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh" event={"ID":"870fa842-bac0-4b1f-9cc1-ababf261a783","Type":"ContainerStarted","Data":"77dba28cb7e43e9c21f99570ec4957c9cac12c7d2ac987beb607a2523105a595"} Jan 22 14:17:36 crc kubenswrapper[5017]: I0122 14:17:36.260984 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh" event={"ID":"870fa842-bac0-4b1f-9cc1-ababf261a783","Type":"ContainerStarted","Data":"2fba0f0fae12619a0b691de5cea0efe2b71765018617e80ea7346998203f305f"} Jan 22 14:17:37 crc kubenswrapper[5017]: I0122 14:17:37.279964 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vqwwr"] Jan 22 14:17:37 crc kubenswrapper[5017]: I0122 14:17:37.281414 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vqwwr" Jan 22 14:17:37 crc kubenswrapper[5017]: I0122 14:17:37.282218 5017 generic.go:334] "Generic (PLEG): container finished" podID="870fa842-bac0-4b1f-9cc1-ababf261a783" containerID="77dba28cb7e43e9c21f99570ec4957c9cac12c7d2ac987beb607a2523105a595" exitCode=0 Jan 22 14:17:37 crc kubenswrapper[5017]: I0122 14:17:37.282281 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh" event={"ID":"870fa842-bac0-4b1f-9cc1-ababf261a783","Type":"ContainerDied","Data":"77dba28cb7e43e9c21f99570ec4957c9cac12c7d2ac987beb607a2523105a595"} Jan 22 14:17:37 crc kubenswrapper[5017]: I0122 14:17:37.285246 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vqwwr"] Jan 22 14:17:37 crc kubenswrapper[5017]: I0122 14:17:37.432579 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edd8893f-6adf-4208-a2d6-17881f7609f4-utilities\") pod \"redhat-operators-vqwwr\" (UID: \"edd8893f-6adf-4208-a2d6-17881f7609f4\") " pod="openshift-marketplace/redhat-operators-vqwwr" Jan 22 14:17:37 crc kubenswrapper[5017]: I0122 14:17:37.432698 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edd8893f-6adf-4208-a2d6-17881f7609f4-catalog-content\") pod \"redhat-operators-vqwwr\" (UID: \"edd8893f-6adf-4208-a2d6-17881f7609f4\") " pod="openshift-marketplace/redhat-operators-vqwwr" Jan 22 14:17:37 crc kubenswrapper[5017]: I0122 14:17:37.432752 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gk724\" (UniqueName: \"kubernetes.io/projected/edd8893f-6adf-4208-a2d6-17881f7609f4-kube-api-access-gk724\") pod \"redhat-operators-vqwwr\" (UID: \"edd8893f-6adf-4208-a2d6-17881f7609f4\") " pod="openshift-marketplace/redhat-operators-vqwwr" Jan 22 14:17:37 crc kubenswrapper[5017]: I0122 14:17:37.534201 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gk724\" (UniqueName: \"kubernetes.io/projected/edd8893f-6adf-4208-a2d6-17881f7609f4-kube-api-access-gk724\") pod \"redhat-operators-vqwwr\" (UID: \"edd8893f-6adf-4208-a2d6-17881f7609f4\") " pod="openshift-marketplace/redhat-operators-vqwwr" Jan 22 14:17:37 crc kubenswrapper[5017]: I0122 14:17:37.534303 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edd8893f-6adf-4208-a2d6-17881f7609f4-utilities\") pod \"redhat-operators-vqwwr\" (UID: \"edd8893f-6adf-4208-a2d6-17881f7609f4\") " pod="openshift-marketplace/redhat-operators-vqwwr" Jan 22 14:17:37 crc kubenswrapper[5017]: I0122 14:17:37.534389 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edd8893f-6adf-4208-a2d6-17881f7609f4-catalog-content\") pod \"redhat-operators-vqwwr\" (UID: \"edd8893f-6adf-4208-a2d6-17881f7609f4\") " pod="openshift-marketplace/redhat-operators-vqwwr" Jan 22 14:17:37 crc kubenswrapper[5017]: I0122 14:17:37.535091 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edd8893f-6adf-4208-a2d6-17881f7609f4-catalog-content\") pod \"redhat-operators-vqwwr\" (UID: 
\"edd8893f-6adf-4208-a2d6-17881f7609f4\") " pod="openshift-marketplace/redhat-operators-vqwwr" Jan 22 14:17:37 crc kubenswrapper[5017]: I0122 14:17:37.535252 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edd8893f-6adf-4208-a2d6-17881f7609f4-utilities\") pod \"redhat-operators-vqwwr\" (UID: \"edd8893f-6adf-4208-a2d6-17881f7609f4\") " pod="openshift-marketplace/redhat-operators-vqwwr" Jan 22 14:17:37 crc kubenswrapper[5017]: I0122 14:17:37.558452 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gk724\" (UniqueName: \"kubernetes.io/projected/edd8893f-6adf-4208-a2d6-17881f7609f4-kube-api-access-gk724\") pod \"redhat-operators-vqwwr\" (UID: \"edd8893f-6adf-4208-a2d6-17881f7609f4\") " pod="openshift-marketplace/redhat-operators-vqwwr" Jan 22 14:17:37 crc kubenswrapper[5017]: I0122 14:17:37.633757 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vqwwr" Jan 22 14:17:38 crc kubenswrapper[5017]: I0122 14:17:38.074370 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vqwwr"] Jan 22 14:17:38 crc kubenswrapper[5017]: W0122 14:17:38.081227 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podedd8893f_6adf_4208_a2d6_17881f7609f4.slice/crio-dcc14b98ad7c665a68407878e7c5cea6a69705a5f94ba83aec6a4649fabf828d WatchSource:0}: Error finding container dcc14b98ad7c665a68407878e7c5cea6a69705a5f94ba83aec6a4649fabf828d: Status 404 returned error can't find the container with id dcc14b98ad7c665a68407878e7c5cea6a69705a5f94ba83aec6a4649fabf828d Jan 22 14:17:38 crc kubenswrapper[5017]: I0122 14:17:38.291667 5017 generic.go:334] "Generic (PLEG): container finished" podID="edd8893f-6adf-4208-a2d6-17881f7609f4" containerID="323742b3702a5c9fc9c58976760da2632d8eba8f1c141f59f57f90c77ef9b6ed" exitCode=0 Jan 22 14:17:38 crc kubenswrapper[5017]: I0122 14:17:38.291775 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vqwwr" event={"ID":"edd8893f-6adf-4208-a2d6-17881f7609f4","Type":"ContainerDied","Data":"323742b3702a5c9fc9c58976760da2632d8eba8f1c141f59f57f90c77ef9b6ed"} Jan 22 14:17:38 crc kubenswrapper[5017]: I0122 14:17:38.292242 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vqwwr" event={"ID":"edd8893f-6adf-4208-a2d6-17881f7609f4","Type":"ContainerStarted","Data":"dcc14b98ad7c665a68407878e7c5cea6a69705a5f94ba83aec6a4649fabf828d"} Jan 22 14:17:39 crc kubenswrapper[5017]: I0122 14:17:39.304475 5017 generic.go:334] "Generic (PLEG): container finished" podID="870fa842-bac0-4b1f-9cc1-ababf261a783" containerID="a6fe6426304034ec0e52355f7086c3de3107f55034c287bd75fc600123f6882c" exitCode=0 Jan 22 14:17:39 crc kubenswrapper[5017]: I0122 14:17:39.304541 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh" event={"ID":"870fa842-bac0-4b1f-9cc1-ababf261a783","Type":"ContainerDied","Data":"a6fe6426304034ec0e52355f7086c3de3107f55034c287bd75fc600123f6882c"} Jan 22 14:17:40 crc kubenswrapper[5017]: I0122 14:17:40.328249 5017 generic.go:334] "Generic (PLEG): container finished" podID="870fa842-bac0-4b1f-9cc1-ababf261a783" containerID="a52b0bea447fd13c03494ad6dc18a5fa607cc0fe51e6e25183e541983b2f0a3e" exitCode=0 Jan 22 14:17:40 crc 
kubenswrapper[5017]: I0122 14:17:40.328358 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh" event={"ID":"870fa842-bac0-4b1f-9cc1-ababf261a783","Type":"ContainerDied","Data":"a52b0bea447fd13c03494ad6dc18a5fa607cc0fe51e6e25183e541983b2f0a3e"} Jan 22 14:17:40 crc kubenswrapper[5017]: I0122 14:17:40.332017 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vqwwr" event={"ID":"edd8893f-6adf-4208-a2d6-17881f7609f4","Type":"ContainerStarted","Data":"67431db66a7c9e6be428d7312742dbd364f036cebfd1de5342b8a6d59076a0c4"} Jan 22 14:17:41 crc kubenswrapper[5017]: I0122 14:17:41.829734 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh" Jan 22 14:17:41 crc kubenswrapper[5017]: I0122 14:17:41.972098 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/870fa842-bac0-4b1f-9cc1-ababf261a783-util\") pod \"870fa842-bac0-4b1f-9cc1-ababf261a783\" (UID: \"870fa842-bac0-4b1f-9cc1-ababf261a783\") " Jan 22 14:17:41 crc kubenswrapper[5017]: I0122 14:17:41.972262 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/870fa842-bac0-4b1f-9cc1-ababf261a783-bundle\") pod \"870fa842-bac0-4b1f-9cc1-ababf261a783\" (UID: \"870fa842-bac0-4b1f-9cc1-ababf261a783\") " Jan 22 14:17:41 crc kubenswrapper[5017]: I0122 14:17:41.972361 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kdbx7\" (UniqueName: \"kubernetes.io/projected/870fa842-bac0-4b1f-9cc1-ababf261a783-kube-api-access-kdbx7\") pod \"870fa842-bac0-4b1f-9cc1-ababf261a783\" (UID: \"870fa842-bac0-4b1f-9cc1-ababf261a783\") " Jan 22 14:17:41 crc kubenswrapper[5017]: I0122 14:17:41.973581 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/870fa842-bac0-4b1f-9cc1-ababf261a783-bundle" (OuterVolumeSpecName: "bundle") pod "870fa842-bac0-4b1f-9cc1-ababf261a783" (UID: "870fa842-bac0-4b1f-9cc1-ababf261a783"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:17:41 crc kubenswrapper[5017]: I0122 14:17:41.987639 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/870fa842-bac0-4b1f-9cc1-ababf261a783-kube-api-access-kdbx7" (OuterVolumeSpecName: "kube-api-access-kdbx7") pod "870fa842-bac0-4b1f-9cc1-ababf261a783" (UID: "870fa842-bac0-4b1f-9cc1-ababf261a783"). InnerVolumeSpecName "kube-api-access-kdbx7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:17:41 crc kubenswrapper[5017]: I0122 14:17:41.991158 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/870fa842-bac0-4b1f-9cc1-ababf261a783-util" (OuterVolumeSpecName: "util") pod "870fa842-bac0-4b1f-9cc1-ababf261a783" (UID: "870fa842-bac0-4b1f-9cc1-ababf261a783"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:17:42 crc kubenswrapper[5017]: I0122 14:17:42.073981 5017 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/870fa842-bac0-4b1f-9cc1-ababf261a783-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:17:42 crc kubenswrapper[5017]: I0122 14:17:42.074034 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kdbx7\" (UniqueName: \"kubernetes.io/projected/870fa842-bac0-4b1f-9cc1-ababf261a783-kube-api-access-kdbx7\") on node \"crc\" DevicePath \"\"" Jan 22 14:17:42 crc kubenswrapper[5017]: I0122 14:17:42.074051 5017 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/870fa842-bac0-4b1f-9cc1-ababf261a783-util\") on node \"crc\" DevicePath \"\"" Jan 22 14:17:42 crc kubenswrapper[5017]: I0122 14:17:42.417467 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh" event={"ID":"870fa842-bac0-4b1f-9cc1-ababf261a783","Type":"ContainerDied","Data":"2fba0f0fae12619a0b691de5cea0efe2b71765018617e80ea7346998203f305f"} Jan 22 14:17:42 crc kubenswrapper[5017]: I0122 14:17:42.417546 5017 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2fba0f0fae12619a0b691de5cea0efe2b71765018617e80ea7346998203f305f" Jan 22 14:17:42 crc kubenswrapper[5017]: I0122 14:17:42.418108 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh" Jan 22 14:17:42 crc kubenswrapper[5017]: I0122 14:17:42.420964 5017 generic.go:334] "Generic (PLEG): container finished" podID="edd8893f-6adf-4208-a2d6-17881f7609f4" containerID="67431db66a7c9e6be428d7312742dbd364f036cebfd1de5342b8a6d59076a0c4" exitCode=0 Jan 22 14:17:42 crc kubenswrapper[5017]: I0122 14:17:42.421018 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vqwwr" event={"ID":"edd8893f-6adf-4208-a2d6-17881f7609f4","Type":"ContainerDied","Data":"67431db66a7c9e6be428d7312742dbd364f036cebfd1de5342b8a6d59076a0c4"} Jan 22 14:17:43 crc kubenswrapper[5017]: I0122 14:17:43.429667 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vqwwr" event={"ID":"edd8893f-6adf-4208-a2d6-17881f7609f4","Type":"ContainerStarted","Data":"f503f2b2b86d69d0445dbfa0c403eff96895110b58bed64e39235fd4f6162bf6"} Jan 22 14:17:43 crc kubenswrapper[5017]: I0122 14:17:43.452382 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vqwwr" podStartSLOduration=1.737025273 podStartE2EDuration="6.452353478s" podCreationTimestamp="2026-01-22 14:17:37 +0000 UTC" firstStartedPulling="2026-01-22 14:17:38.293651908 +0000 UTC m=+873.286113766" lastFinishedPulling="2026-01-22 14:17:43.008980103 +0000 UTC m=+878.001441971" observedRunningTime="2026-01-22 14:17:43.450432653 +0000 UTC m=+878.442894521" watchObservedRunningTime="2026-01-22 14:17:43.452353478 +0000 UTC m=+878.444815336" Jan 22 14:17:45 crc kubenswrapper[5017]: I0122 14:17:45.600369 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-47q9q"] Jan 22 14:17:45 crc kubenswrapper[5017]: E0122 14:17:45.600700 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="870fa842-bac0-4b1f-9cc1-ababf261a783" containerName="util" Jan 22 
14:17:45 crc kubenswrapper[5017]: I0122 14:17:45.600718 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="870fa842-bac0-4b1f-9cc1-ababf261a783" containerName="util" Jan 22 14:17:45 crc kubenswrapper[5017]: E0122 14:17:45.600733 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="870fa842-bac0-4b1f-9cc1-ababf261a783" containerName="extract" Jan 22 14:17:45 crc kubenswrapper[5017]: I0122 14:17:45.600743 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="870fa842-bac0-4b1f-9cc1-ababf261a783" containerName="extract" Jan 22 14:17:45 crc kubenswrapper[5017]: E0122 14:17:45.600770 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="870fa842-bac0-4b1f-9cc1-ababf261a783" containerName="pull" Jan 22 14:17:45 crc kubenswrapper[5017]: I0122 14:17:45.600779 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="870fa842-bac0-4b1f-9cc1-ababf261a783" containerName="pull" Jan 22 14:17:45 crc kubenswrapper[5017]: I0122 14:17:45.600922 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="870fa842-bac0-4b1f-9cc1-ababf261a783" containerName="extract" Jan 22 14:17:45 crc kubenswrapper[5017]: I0122 14:17:45.603405 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-47q9q" Jan 22 14:17:45 crc kubenswrapper[5017]: I0122 14:17:45.609535 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Jan 22 14:17:45 crc kubenswrapper[5017]: I0122 14:17:45.609614 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-7nz6p" Jan 22 14:17:45 crc kubenswrapper[5017]: I0122 14:17:45.616407 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Jan 22 14:17:45 crc kubenswrapper[5017]: I0122 14:17:45.623472 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-47q9q"] Jan 22 14:17:45 crc kubenswrapper[5017]: I0122 14:17:45.628046 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqhm5\" (UniqueName: \"kubernetes.io/projected/01f86f52-94be-4033-bfed-8e7a2caca5f8-kube-api-access-rqhm5\") pod \"nmstate-operator-646758c888-47q9q\" (UID: \"01f86f52-94be-4033-bfed-8e7a2caca5f8\") " pod="openshift-nmstate/nmstate-operator-646758c888-47q9q" Jan 22 14:17:45 crc kubenswrapper[5017]: I0122 14:17:45.728625 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqhm5\" (UniqueName: \"kubernetes.io/projected/01f86f52-94be-4033-bfed-8e7a2caca5f8-kube-api-access-rqhm5\") pod \"nmstate-operator-646758c888-47q9q\" (UID: \"01f86f52-94be-4033-bfed-8e7a2caca5f8\") " pod="openshift-nmstate/nmstate-operator-646758c888-47q9q" Jan 22 14:17:45 crc kubenswrapper[5017]: I0122 14:17:45.755568 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqhm5\" (UniqueName: \"kubernetes.io/projected/01f86f52-94be-4033-bfed-8e7a2caca5f8-kube-api-access-rqhm5\") pod \"nmstate-operator-646758c888-47q9q\" (UID: \"01f86f52-94be-4033-bfed-8e7a2caca5f8\") " pod="openshift-nmstate/nmstate-operator-646758c888-47q9q" Jan 22 14:17:45 crc kubenswrapper[5017]: I0122 14:17:45.919910 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-47q9q" Jan 22 14:17:46 crc kubenswrapper[5017]: I0122 14:17:46.485920 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-47q9q"] Jan 22 14:17:46 crc kubenswrapper[5017]: W0122 14:17:46.502376 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod01f86f52_94be_4033_bfed_8e7a2caca5f8.slice/crio-ba777d966da27ac567614600413d403f37e843a9e66697a23d27c57ececeda26 WatchSource:0}: Error finding container ba777d966da27ac567614600413d403f37e843a9e66697a23d27c57ececeda26: Status 404 returned error can't find the container with id ba777d966da27ac567614600413d403f37e843a9e66697a23d27c57ececeda26 Jan 22 14:17:47 crc kubenswrapper[5017]: I0122 14:17:47.464218 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-47q9q" event={"ID":"01f86f52-94be-4033-bfed-8e7a2caca5f8","Type":"ContainerStarted","Data":"ba777d966da27ac567614600413d403f37e843a9e66697a23d27c57ececeda26"} Jan 22 14:17:47 crc kubenswrapper[5017]: I0122 14:17:47.635523 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vqwwr" Jan 22 14:17:47 crc kubenswrapper[5017]: I0122 14:17:47.635617 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vqwwr" Jan 22 14:17:48 crc kubenswrapper[5017]: I0122 14:17:48.689826 5017 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-vqwwr" podUID="edd8893f-6adf-4208-a2d6-17881f7609f4" containerName="registry-server" probeResult="failure" output=< Jan 22 14:17:48 crc kubenswrapper[5017]: timeout: failed to connect service ":50051" within 1s Jan 22 14:17:48 crc kubenswrapper[5017]: > Jan 22 14:17:49 crc kubenswrapper[5017]: I0122 14:17:49.495845 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-47q9q" event={"ID":"01f86f52-94be-4033-bfed-8e7a2caca5f8","Type":"ContainerStarted","Data":"60d19aed06cda0b9965e57444c7059efc2bf406b867ce7af81060731b75d7193"} Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.228751 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-646758c888-47q9q" podStartSLOduration=8.018087632 podStartE2EDuration="10.228719036s" podCreationTimestamp="2026-01-22 14:17:45 +0000 UTC" firstStartedPulling="2026-01-22 14:17:46.504281016 +0000 UTC m=+881.496742874" lastFinishedPulling="2026-01-22 14:17:48.71491241 +0000 UTC m=+883.707374278" observedRunningTime="2026-01-22 14:17:49.515978628 +0000 UTC m=+884.508440506" watchObservedRunningTime="2026-01-22 14:17:55.228719036 +0000 UTC m=+890.221180894" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.229952 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-wcr2m"] Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.264927 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-bgtxx"] Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.267206 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-wcr2m" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.268131 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bgtxx" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.293496 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-jgj7g" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.293563 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.300165 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-wcr2m"] Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.300756 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-bgtxx"] Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.306301 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-cpqbx"] Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.307637 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-cpqbx" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.391432 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/80b4685d-56a7-44f8-a881-4dfcce29d323-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-bgtxx\" (UID: \"80b4685d-56a7-44f8-a881-4dfcce29d323\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bgtxx" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.391483 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpfxk\" (UniqueName: \"kubernetes.io/projected/ffe16f13-85bc-4a51-9c0b-c350d92075b7-kube-api-access-lpfxk\") pod \"nmstate-metrics-54757c584b-wcr2m\" (UID: \"ffe16f13-85bc-4a51-9c0b-c350d92075b7\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-wcr2m" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.391508 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmrg9\" (UniqueName: \"kubernetes.io/projected/80b4685d-56a7-44f8-a881-4dfcce29d323-kube-api-access-tmrg9\") pod \"nmstate-webhook-8474b5b9d8-bgtxx\" (UID: \"80b4685d-56a7-44f8-a881-4dfcce29d323\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bgtxx" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.392730 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-9zpdh"] Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.393670 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-9zpdh" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.395946 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-4w98c" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.396426 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.396794 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.414928 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-9zpdh"] Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.493267 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpfxk\" (UniqueName: \"kubernetes.io/projected/ffe16f13-85bc-4a51-9c0b-c350d92075b7-kube-api-access-lpfxk\") pod \"nmstate-metrics-54757c584b-wcr2m\" (UID: \"ffe16f13-85bc-4a51-9c0b-c350d92075b7\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-wcr2m" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.493348 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/70e8f966-dc89-408d-b174-5076caad75f3-ovs-socket\") pod \"nmstate-handler-cpqbx\" (UID: \"70e8f966-dc89-408d-b174-5076caad75f3\") " pod="openshift-nmstate/nmstate-handler-cpqbx" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.493379 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmrg9\" (UniqueName: \"kubernetes.io/projected/80b4685d-56a7-44f8-a881-4dfcce29d323-kube-api-access-tmrg9\") pod \"nmstate-webhook-8474b5b9d8-bgtxx\" (UID: \"80b4685d-56a7-44f8-a881-4dfcce29d323\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bgtxx" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.493448 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6lwl\" (UniqueName: \"kubernetes.io/projected/83182e42-1f12-414e-a7ec-d4926434cdf1-kube-api-access-h6lwl\") pod \"nmstate-console-plugin-7754f76f8b-9zpdh\" (UID: \"83182e42-1f12-414e-a7ec-d4926434cdf1\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-9zpdh" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.493472 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlv6p\" (UniqueName: \"kubernetes.io/projected/70e8f966-dc89-408d-b174-5076caad75f3-kube-api-access-tlv6p\") pod \"nmstate-handler-cpqbx\" (UID: \"70e8f966-dc89-408d-b174-5076caad75f3\") " pod="openshift-nmstate/nmstate-handler-cpqbx" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.493535 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/70e8f966-dc89-408d-b174-5076caad75f3-nmstate-lock\") pod \"nmstate-handler-cpqbx\" (UID: \"70e8f966-dc89-408d-b174-5076caad75f3\") " pod="openshift-nmstate/nmstate-handler-cpqbx" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.493561 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/83182e42-1f12-414e-a7ec-d4926434cdf1-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-9zpdh\" (UID: \"83182e42-1f12-414e-a7ec-d4926434cdf1\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-9zpdh" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.493593 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/83182e42-1f12-414e-a7ec-d4926434cdf1-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-9zpdh\" (UID: \"83182e42-1f12-414e-a7ec-d4926434cdf1\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-9zpdh" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.493622 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/70e8f966-dc89-408d-b174-5076caad75f3-dbus-socket\") pod \"nmstate-handler-cpqbx\" (UID: \"70e8f966-dc89-408d-b174-5076caad75f3\") " pod="openshift-nmstate/nmstate-handler-cpqbx" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.493766 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/80b4685d-56a7-44f8-a881-4dfcce29d323-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-bgtxx\" (UID: \"80b4685d-56a7-44f8-a881-4dfcce29d323\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bgtxx" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.504499 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/80b4685d-56a7-44f8-a881-4dfcce29d323-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-bgtxx\" (UID: \"80b4685d-56a7-44f8-a881-4dfcce29d323\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bgtxx" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.518336 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmrg9\" (UniqueName: \"kubernetes.io/projected/80b4685d-56a7-44f8-a881-4dfcce29d323-kube-api-access-tmrg9\") pod \"nmstate-webhook-8474b5b9d8-bgtxx\" (UID: \"80b4685d-56a7-44f8-a881-4dfcce29d323\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bgtxx" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.518336 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpfxk\" (UniqueName: \"kubernetes.io/projected/ffe16f13-85bc-4a51-9c0b-c350d92075b7-kube-api-access-lpfxk\") pod \"nmstate-metrics-54757c584b-wcr2m\" (UID: \"ffe16f13-85bc-4a51-9c0b-c350d92075b7\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-wcr2m" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.595184 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-84fb97d564-jnttj"] Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.595375 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6lwl\" (UniqueName: \"kubernetes.io/projected/83182e42-1f12-414e-a7ec-d4926434cdf1-kube-api-access-h6lwl\") pod \"nmstate-console-plugin-7754f76f8b-9zpdh\" (UID: \"83182e42-1f12-414e-a7ec-d4926434cdf1\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-9zpdh" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.595433 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlv6p\" (UniqueName: \"kubernetes.io/projected/70e8f966-dc89-408d-b174-5076caad75f3-kube-api-access-tlv6p\") pod 
\"nmstate-handler-cpqbx\" (UID: \"70e8f966-dc89-408d-b174-5076caad75f3\") " pod="openshift-nmstate/nmstate-handler-cpqbx" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.595481 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/70e8f966-dc89-408d-b174-5076caad75f3-nmstate-lock\") pod \"nmstate-handler-cpqbx\" (UID: \"70e8f966-dc89-408d-b174-5076caad75f3\") " pod="openshift-nmstate/nmstate-handler-cpqbx" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.595508 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/83182e42-1f12-414e-a7ec-d4926434cdf1-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-9zpdh\" (UID: \"83182e42-1f12-414e-a7ec-d4926434cdf1\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-9zpdh" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.595554 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/83182e42-1f12-414e-a7ec-d4926434cdf1-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-9zpdh\" (UID: \"83182e42-1f12-414e-a7ec-d4926434cdf1\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-9zpdh" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.595586 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/70e8f966-dc89-408d-b174-5076caad75f3-dbus-socket\") pod \"nmstate-handler-cpqbx\" (UID: \"70e8f966-dc89-408d-b174-5076caad75f3\") " pod="openshift-nmstate/nmstate-handler-cpqbx" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.595615 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/70e8f966-dc89-408d-b174-5076caad75f3-ovs-socket\") pod \"nmstate-handler-cpqbx\" (UID: \"70e8f966-dc89-408d-b174-5076caad75f3\") " pod="openshift-nmstate/nmstate-handler-cpqbx" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.595758 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/70e8f966-dc89-408d-b174-5076caad75f3-ovs-socket\") pod \"nmstate-handler-cpqbx\" (UID: \"70e8f966-dc89-408d-b174-5076caad75f3\") " pod="openshift-nmstate/nmstate-handler-cpqbx" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.596381 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/70e8f966-dc89-408d-b174-5076caad75f3-nmstate-lock\") pod \"nmstate-handler-cpqbx\" (UID: \"70e8f966-dc89-408d-b174-5076caad75f3\") " pod="openshift-nmstate/nmstate-handler-cpqbx" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.596560 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.598540 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/83182e42-1f12-414e-a7ec-d4926434cdf1-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-9zpdh\" (UID: \"83182e42-1f12-414e-a7ec-d4926434cdf1\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-9zpdh" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.598932 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/70e8f966-dc89-408d-b174-5076caad75f3-dbus-socket\") pod \"nmstate-handler-cpqbx\" (UID: \"70e8f966-dc89-408d-b174-5076caad75f3\") " pod="openshift-nmstate/nmstate-handler-cpqbx" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.604945 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/83182e42-1f12-414e-a7ec-d4926434cdf1-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-9zpdh\" (UID: \"83182e42-1f12-414e-a7ec-d4926434cdf1\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-9zpdh" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.612519 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-84fb97d564-jnttj"] Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.627040 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-wcr2m" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.627303 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlv6p\" (UniqueName: \"kubernetes.io/projected/70e8f966-dc89-408d-b174-5076caad75f3-kube-api-access-tlv6p\") pod \"nmstate-handler-cpqbx\" (UID: \"70e8f966-dc89-408d-b174-5076caad75f3\") " pod="openshift-nmstate/nmstate-handler-cpqbx" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.633148 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6lwl\" (UniqueName: \"kubernetes.io/projected/83182e42-1f12-414e-a7ec-d4926434cdf1-kube-api-access-h6lwl\") pod \"nmstate-console-plugin-7754f76f8b-9zpdh\" (UID: \"83182e42-1f12-414e-a7ec-d4926434cdf1\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-9zpdh" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.642277 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bgtxx" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.654984 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-cpqbx" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.697071 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/566741cf-5905-4f43-966d-536b58364444-console-oauth-config\") pod \"console-84fb97d564-jnttj\" (UID: \"566741cf-5905-4f43-966d-536b58364444\") " pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.697130 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwkh8\" (UniqueName: \"kubernetes.io/projected/566741cf-5905-4f43-966d-536b58364444-kube-api-access-pwkh8\") pod \"console-84fb97d564-jnttj\" (UID: \"566741cf-5905-4f43-966d-536b58364444\") " pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.697168 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/566741cf-5905-4f43-966d-536b58364444-console-serving-cert\") pod \"console-84fb97d564-jnttj\" (UID: \"566741cf-5905-4f43-966d-536b58364444\") " pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.697191 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/566741cf-5905-4f43-966d-536b58364444-oauth-serving-cert\") pod \"console-84fb97d564-jnttj\" (UID: \"566741cf-5905-4f43-966d-536b58364444\") " pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.697216 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/566741cf-5905-4f43-966d-536b58364444-trusted-ca-bundle\") pod \"console-84fb97d564-jnttj\" (UID: \"566741cf-5905-4f43-966d-536b58364444\") " pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.697234 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/566741cf-5905-4f43-966d-536b58364444-service-ca\") pod \"console-84fb97d564-jnttj\" (UID: \"566741cf-5905-4f43-966d-536b58364444\") " pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.697269 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/566741cf-5905-4f43-966d-536b58364444-console-config\") pod \"console-84fb97d564-jnttj\" (UID: \"566741cf-5905-4f43-966d-536b58364444\") " pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.713699 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-9zpdh" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.799158 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/566741cf-5905-4f43-966d-536b58364444-console-oauth-config\") pod \"console-84fb97d564-jnttj\" (UID: \"566741cf-5905-4f43-966d-536b58364444\") " pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.799214 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwkh8\" (UniqueName: \"kubernetes.io/projected/566741cf-5905-4f43-966d-536b58364444-kube-api-access-pwkh8\") pod \"console-84fb97d564-jnttj\" (UID: \"566741cf-5905-4f43-966d-536b58364444\") " pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.799256 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/566741cf-5905-4f43-966d-536b58364444-console-serving-cert\") pod \"console-84fb97d564-jnttj\" (UID: \"566741cf-5905-4f43-966d-536b58364444\") " pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.799289 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/566741cf-5905-4f43-966d-536b58364444-oauth-serving-cert\") pod \"console-84fb97d564-jnttj\" (UID: \"566741cf-5905-4f43-966d-536b58364444\") " pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.799317 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/566741cf-5905-4f43-966d-536b58364444-trusted-ca-bundle\") pod \"console-84fb97d564-jnttj\" (UID: \"566741cf-5905-4f43-966d-536b58364444\") " pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.799334 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/566741cf-5905-4f43-966d-536b58364444-service-ca\") pod \"console-84fb97d564-jnttj\" (UID: \"566741cf-5905-4f43-966d-536b58364444\") " pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.799368 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/566741cf-5905-4f43-966d-536b58364444-console-config\") pod \"console-84fb97d564-jnttj\" (UID: \"566741cf-5905-4f43-966d-536b58364444\") " pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.800796 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/566741cf-5905-4f43-966d-536b58364444-console-config\") pod \"console-84fb97d564-jnttj\" (UID: \"566741cf-5905-4f43-966d-536b58364444\") " pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.801586 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/566741cf-5905-4f43-966d-536b58364444-trusted-ca-bundle\") pod \"console-84fb97d564-jnttj\" (UID: 
\"566741cf-5905-4f43-966d-536b58364444\") " pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.801588 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/566741cf-5905-4f43-966d-536b58364444-oauth-serving-cert\") pod \"console-84fb97d564-jnttj\" (UID: \"566741cf-5905-4f43-966d-536b58364444\") " pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.802679 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/566741cf-5905-4f43-966d-536b58364444-service-ca\") pod \"console-84fb97d564-jnttj\" (UID: \"566741cf-5905-4f43-966d-536b58364444\") " pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.805558 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/566741cf-5905-4f43-966d-536b58364444-console-oauth-config\") pod \"console-84fb97d564-jnttj\" (UID: \"566741cf-5905-4f43-966d-536b58364444\") " pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.805723 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/566741cf-5905-4f43-966d-536b58364444-console-serving-cert\") pod \"console-84fb97d564-jnttj\" (UID: \"566741cf-5905-4f43-966d-536b58364444\") " pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.816575 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwkh8\" (UniqueName: \"kubernetes.io/projected/566741cf-5905-4f43-966d-536b58364444-kube-api-access-pwkh8\") pod \"console-84fb97d564-jnttj\" (UID: \"566741cf-5905-4f43-966d-536b58364444\") " pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:17:55 crc kubenswrapper[5017]: I0122 14:17:55.964948 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:17:56 crc kubenswrapper[5017]: I0122 14:17:56.110587 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-bgtxx"] Jan 22 14:17:56 crc kubenswrapper[5017]: W0122 14:17:56.118684 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod80b4685d_56a7_44f8_a881_4dfcce29d323.slice/crio-096ea3cc32091d0a8ebb4dec1e634c42e6e85e7976aae4cbcef82b306708a5cc WatchSource:0}: Error finding container 096ea3cc32091d0a8ebb4dec1e634c42e6e85e7976aae4cbcef82b306708a5cc: Status 404 returned error can't find the container with id 096ea3cc32091d0a8ebb4dec1e634c42e6e85e7976aae4cbcef82b306708a5cc Jan 22 14:17:56 crc kubenswrapper[5017]: I0122 14:17:56.353554 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-9zpdh"] Jan 22 14:17:56 crc kubenswrapper[5017]: W0122 14:17:56.358248 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod83182e42_1f12_414e_a7ec_d4926434cdf1.slice/crio-0c6a527acb4c2b6a7e5f80914f2a1f8de990e489be0898884347af0a34b7adf4 WatchSource:0}: Error finding container 0c6a527acb4c2b6a7e5f80914f2a1f8de990e489be0898884347af0a34b7adf4: Status 404 returned error can't find the container with id 0c6a527acb4c2b6a7e5f80914f2a1f8de990e489be0898884347af0a34b7adf4 Jan 22 14:17:56 crc kubenswrapper[5017]: I0122 14:17:56.430326 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-wcr2m"] Jan 22 14:17:56 crc kubenswrapper[5017]: I0122 14:17:56.558042 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bgtxx" event={"ID":"80b4685d-56a7-44f8-a881-4dfcce29d323","Type":"ContainerStarted","Data":"096ea3cc32091d0a8ebb4dec1e634c42e6e85e7976aae4cbcef82b306708a5cc"} Jan 22 14:17:56 crc kubenswrapper[5017]: I0122 14:17:56.561696 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-cpqbx" event={"ID":"70e8f966-dc89-408d-b174-5076caad75f3","Type":"ContainerStarted","Data":"b74bf38bccf163e4f1a65030da7080761873923bee5c24880f401b25fd6fa4ed"} Jan 22 14:17:56 crc kubenswrapper[5017]: I0122 14:17:56.563777 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-9zpdh" event={"ID":"83182e42-1f12-414e-a7ec-d4926434cdf1","Type":"ContainerStarted","Data":"0c6a527acb4c2b6a7e5f80914f2a1f8de990e489be0898884347af0a34b7adf4"} Jan 22 14:17:56 crc kubenswrapper[5017]: I0122 14:17:56.565899 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-wcr2m" event={"ID":"ffe16f13-85bc-4a51-9c0b-c350d92075b7","Type":"ContainerStarted","Data":"5dc65a8be59b7cf9a0834c6626cb504c2fc227d7a3e1a8bf56240b7cd2f8c3ae"} Jan 22 14:17:56 crc kubenswrapper[5017]: I0122 14:17:56.594391 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-84fb97d564-jnttj"] Jan 22 14:17:56 crc kubenswrapper[5017]: W0122 14:17:56.603708 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod566741cf_5905_4f43_966d_536b58364444.slice/crio-20312fe44d4468c5a4c62663dc2fbd5cb0fd3152a9b24096443c5308a2b3b9ad WatchSource:0}: Error finding container 
20312fe44d4468c5a4c62663dc2fbd5cb0fd3152a9b24096443c5308a2b3b9ad: Status 404 returned error can't find the container with id 20312fe44d4468c5a4c62663dc2fbd5cb0fd3152a9b24096443c5308a2b3b9ad Jan 22 14:17:57 crc kubenswrapper[5017]: I0122 14:17:57.581025 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-84fb97d564-jnttj" event={"ID":"566741cf-5905-4f43-966d-536b58364444","Type":"ContainerStarted","Data":"12ca938e526356f3c2430b6b88142a9d073bbdf5ff13f18808d8ada852274c0b"} Jan 22 14:17:57 crc kubenswrapper[5017]: I0122 14:17:57.581110 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-84fb97d564-jnttj" event={"ID":"566741cf-5905-4f43-966d-536b58364444","Type":"ContainerStarted","Data":"20312fe44d4468c5a4c62663dc2fbd5cb0fd3152a9b24096443c5308a2b3b9ad"} Jan 22 14:17:57 crc kubenswrapper[5017]: I0122 14:17:57.608794 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-84fb97d564-jnttj" podStartSLOduration=2.608752846 podStartE2EDuration="2.608752846s" podCreationTimestamp="2026-01-22 14:17:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:17:57.603679173 +0000 UTC m=+892.596141061" watchObservedRunningTime="2026-01-22 14:17:57.608752846 +0000 UTC m=+892.601214724" Jan 22 14:17:57 crc kubenswrapper[5017]: I0122 14:17:57.709330 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vqwwr" Jan 22 14:17:57 crc kubenswrapper[5017]: I0122 14:17:57.757635 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vqwwr" Jan 22 14:17:57 crc kubenswrapper[5017]: I0122 14:17:57.950903 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vqwwr"] Jan 22 14:17:59 crc kubenswrapper[5017]: I0122 14:17:59.665040 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vqwwr" podUID="edd8893f-6adf-4208-a2d6-17881f7609f4" containerName="registry-server" containerID="cri-o://f503f2b2b86d69d0445dbfa0c403eff96895110b58bed64e39235fd4f6162bf6" gracePeriod=2 Jan 22 14:18:00 crc kubenswrapper[5017]: I0122 14:18:00.695345 5017 generic.go:334] "Generic (PLEG): container finished" podID="edd8893f-6adf-4208-a2d6-17881f7609f4" containerID="f503f2b2b86d69d0445dbfa0c403eff96895110b58bed64e39235fd4f6162bf6" exitCode=0 Jan 22 14:18:00 crc kubenswrapper[5017]: I0122 14:18:00.695414 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vqwwr" event={"ID":"edd8893f-6adf-4208-a2d6-17881f7609f4","Type":"ContainerDied","Data":"f503f2b2b86d69d0445dbfa0c403eff96895110b58bed64e39235fd4f6162bf6"} Jan 22 14:18:00 crc kubenswrapper[5017]: I0122 14:18:00.784440 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vqwwr" Jan 22 14:18:00 crc kubenswrapper[5017]: I0122 14:18:00.905427 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edd8893f-6adf-4208-a2d6-17881f7609f4-utilities\") pod \"edd8893f-6adf-4208-a2d6-17881f7609f4\" (UID: \"edd8893f-6adf-4208-a2d6-17881f7609f4\") " Jan 22 14:18:00 crc kubenswrapper[5017]: I0122 14:18:00.905616 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gk724\" (UniqueName: \"kubernetes.io/projected/edd8893f-6adf-4208-a2d6-17881f7609f4-kube-api-access-gk724\") pod \"edd8893f-6adf-4208-a2d6-17881f7609f4\" (UID: \"edd8893f-6adf-4208-a2d6-17881f7609f4\") " Jan 22 14:18:00 crc kubenswrapper[5017]: I0122 14:18:00.905648 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edd8893f-6adf-4208-a2d6-17881f7609f4-catalog-content\") pod \"edd8893f-6adf-4208-a2d6-17881f7609f4\" (UID: \"edd8893f-6adf-4208-a2d6-17881f7609f4\") " Jan 22 14:18:00 crc kubenswrapper[5017]: I0122 14:18:00.906593 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/edd8893f-6adf-4208-a2d6-17881f7609f4-utilities" (OuterVolumeSpecName: "utilities") pod "edd8893f-6adf-4208-a2d6-17881f7609f4" (UID: "edd8893f-6adf-4208-a2d6-17881f7609f4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:18:00 crc kubenswrapper[5017]: I0122 14:18:00.911416 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/edd8893f-6adf-4208-a2d6-17881f7609f4-kube-api-access-gk724" (OuterVolumeSpecName: "kube-api-access-gk724") pod "edd8893f-6adf-4208-a2d6-17881f7609f4" (UID: "edd8893f-6adf-4208-a2d6-17881f7609f4"). InnerVolumeSpecName "kube-api-access-gk724". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:18:01 crc kubenswrapper[5017]: I0122 14:18:01.007907 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gk724\" (UniqueName: \"kubernetes.io/projected/edd8893f-6adf-4208-a2d6-17881f7609f4-kube-api-access-gk724\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:01 crc kubenswrapper[5017]: I0122 14:18:01.007957 5017 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edd8893f-6adf-4208-a2d6-17881f7609f4-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:01 crc kubenswrapper[5017]: I0122 14:18:01.018013 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/edd8893f-6adf-4208-a2d6-17881f7609f4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "edd8893f-6adf-4208-a2d6-17881f7609f4" (UID: "edd8893f-6adf-4208-a2d6-17881f7609f4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:18:01 crc kubenswrapper[5017]: I0122 14:18:01.109056 5017 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edd8893f-6adf-4208-a2d6-17881f7609f4-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:01 crc kubenswrapper[5017]: I0122 14:18:01.708399 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vqwwr" event={"ID":"edd8893f-6adf-4208-a2d6-17881f7609f4","Type":"ContainerDied","Data":"dcc14b98ad7c665a68407878e7c5cea6a69705a5f94ba83aec6a4649fabf828d"} Jan 22 14:18:01 crc kubenswrapper[5017]: I0122 14:18:01.708445 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vqwwr" Jan 22 14:18:01 crc kubenswrapper[5017]: I0122 14:18:01.708839 5017 scope.go:117] "RemoveContainer" containerID="f503f2b2b86d69d0445dbfa0c403eff96895110b58bed64e39235fd4f6162bf6" Jan 22 14:18:01 crc kubenswrapper[5017]: I0122 14:18:01.711492 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bgtxx" event={"ID":"80b4685d-56a7-44f8-a881-4dfcce29d323","Type":"ContainerStarted","Data":"9b010701f0ecc1a10aaf65fe0fc8c4a7198cbcf800d5ab275ff5bfdd21f29020"} Jan 22 14:18:01 crc kubenswrapper[5017]: I0122 14:18:01.711526 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bgtxx" Jan 22 14:18:01 crc kubenswrapper[5017]: I0122 14:18:01.714072 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-cpqbx" event={"ID":"70e8f966-dc89-408d-b174-5076caad75f3","Type":"ContainerStarted","Data":"e0e77f7648852d41022996c80a38a38bb2988e2fb00ee0943f162208d96fc0e5"} Jan 22 14:18:01 crc kubenswrapper[5017]: I0122 14:18:01.714183 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-cpqbx" Jan 22 14:18:01 crc kubenswrapper[5017]: I0122 14:18:01.716001 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-9zpdh" event={"ID":"83182e42-1f12-414e-a7ec-d4926434cdf1","Type":"ContainerStarted","Data":"24aefdecef0bdad452f1a5706e1672aaea5c9f14b286218e969d4bb504aa7f40"} Jan 22 14:18:01 crc kubenswrapper[5017]: I0122 14:18:01.718598 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-wcr2m" event={"ID":"ffe16f13-85bc-4a51-9c0b-c350d92075b7","Type":"ContainerStarted","Data":"ec41a1e7ff39498820d6c67f6ff53ba89f19b6d1a9ae30ea9ad5ef6b13212ac9"} Jan 22 14:18:01 crc kubenswrapper[5017]: I0122 14:18:01.738565 5017 scope.go:117] "RemoveContainer" containerID="67431db66a7c9e6be428d7312742dbd364f036cebfd1de5342b8a6d59076a0c4" Jan 22 14:18:01 crc kubenswrapper[5017]: I0122 14:18:01.743183 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vqwwr"] Jan 22 14:18:01 crc kubenswrapper[5017]: I0122 14:18:01.754646 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vqwwr"] Jan 22 14:18:01 crc kubenswrapper[5017]: I0122 14:18:01.758149 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-cpqbx" podStartSLOduration=1.852293858 podStartE2EDuration="6.758127144s" podCreationTimestamp="2026-01-22 14:17:55 +0000 UTC" firstStartedPulling="2026-01-22 
14:17:55.692684132 +0000 UTC m=+890.685145990" lastFinishedPulling="2026-01-22 14:18:00.598517418 +0000 UTC m=+895.590979276" observedRunningTime="2026-01-22 14:18:01.745906289 +0000 UTC m=+896.738368157" watchObservedRunningTime="2026-01-22 14:18:01.758127144 +0000 UTC m=+896.750589012" Jan 22 14:18:01 crc kubenswrapper[5017]: I0122 14:18:01.779922 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bgtxx" podStartSLOduration=2.311715096 podStartE2EDuration="6.779891619s" podCreationTimestamp="2026-01-22 14:17:55 +0000 UTC" firstStartedPulling="2026-01-22 14:17:56.122777461 +0000 UTC m=+891.115239329" lastFinishedPulling="2026-01-22 14:18:00.590953994 +0000 UTC m=+895.583415852" observedRunningTime="2026-01-22 14:18:01.77072246 +0000 UTC m=+896.763184338" watchObservedRunningTime="2026-01-22 14:18:01.779891619 +0000 UTC m=+896.772353477" Jan 22 14:18:01 crc kubenswrapper[5017]: I0122 14:18:01.781036 5017 scope.go:117] "RemoveContainer" containerID="323742b3702a5c9fc9c58976760da2632d8eba8f1c141f59f57f90c77ef9b6ed" Jan 22 14:18:03 crc kubenswrapper[5017]: I0122 14:18:03.270920 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="edd8893f-6adf-4208-a2d6-17881f7609f4" path="/var/lib/kubelet/pods/edd8893f-6adf-4208-a2d6-17881f7609f4/volumes" Jan 22 14:18:03 crc kubenswrapper[5017]: I0122 14:18:03.741008 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-wcr2m" event={"ID":"ffe16f13-85bc-4a51-9c0b-c350d92075b7","Type":"ContainerStarted","Data":"34b34db8a8599ddc2e8adb34de2655ca22c50c4a89608f7a754081efd7b27074"} Jan 22 14:18:03 crc kubenswrapper[5017]: I0122 14:18:03.767986 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-9zpdh" podStartSLOduration=4.556889495 podStartE2EDuration="8.767919145s" podCreationTimestamp="2026-01-22 14:17:55 +0000 UTC" firstStartedPulling="2026-01-22 14:17:56.36206759 +0000 UTC m=+891.354529448" lastFinishedPulling="2026-01-22 14:18:00.57309724 +0000 UTC m=+895.565559098" observedRunningTime="2026-01-22 14:18:01.794983185 +0000 UTC m=+896.787445053" watchObservedRunningTime="2026-01-22 14:18:03.767919145 +0000 UTC m=+898.760381043" Jan 22 14:18:03 crc kubenswrapper[5017]: I0122 14:18:03.776977 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-54757c584b-wcr2m" podStartSLOduration=2.252214655 podStartE2EDuration="8.776945009s" podCreationTimestamp="2026-01-22 14:17:55 +0000 UTC" firstStartedPulling="2026-01-22 14:17:56.43956498 +0000 UTC m=+891.432026828" lastFinishedPulling="2026-01-22 14:18:02.964295324 +0000 UTC m=+897.956757182" observedRunningTime="2026-01-22 14:18:03.762589704 +0000 UTC m=+898.755051612" watchObservedRunningTime="2026-01-22 14:18:03.776945009 +0000 UTC m=+898.769406907" Jan 22 14:18:05 crc kubenswrapper[5017]: I0122 14:18:05.683632 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-cpqbx" Jan 22 14:18:05 crc kubenswrapper[5017]: I0122 14:18:05.966867 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:18:05 crc kubenswrapper[5017]: I0122 14:18:05.967342 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:18:05 crc kubenswrapper[5017]: I0122 
14:18:05.974242 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:18:06 crc kubenswrapper[5017]: I0122 14:18:06.783887 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-84fb97d564-jnttj" Jan 22 14:18:06 crc kubenswrapper[5017]: I0122 14:18:06.852535 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-j4qnq"] Jan 22 14:18:09 crc kubenswrapper[5017]: I0122 14:18:09.323151 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hhxz6"] Jan 22 14:18:09 crc kubenswrapper[5017]: E0122 14:18:09.324040 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edd8893f-6adf-4208-a2d6-17881f7609f4" containerName="extract-utilities" Jan 22 14:18:09 crc kubenswrapper[5017]: I0122 14:18:09.324088 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="edd8893f-6adf-4208-a2d6-17881f7609f4" containerName="extract-utilities" Jan 22 14:18:09 crc kubenswrapper[5017]: E0122 14:18:09.324102 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edd8893f-6adf-4208-a2d6-17881f7609f4" containerName="extract-content" Jan 22 14:18:09 crc kubenswrapper[5017]: I0122 14:18:09.324129 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="edd8893f-6adf-4208-a2d6-17881f7609f4" containerName="extract-content" Jan 22 14:18:09 crc kubenswrapper[5017]: E0122 14:18:09.324153 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edd8893f-6adf-4208-a2d6-17881f7609f4" containerName="registry-server" Jan 22 14:18:09 crc kubenswrapper[5017]: I0122 14:18:09.324163 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="edd8893f-6adf-4208-a2d6-17881f7609f4" containerName="registry-server" Jan 22 14:18:09 crc kubenswrapper[5017]: I0122 14:18:09.324309 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="edd8893f-6adf-4208-a2d6-17881f7609f4" containerName="registry-server" Jan 22 14:18:09 crc kubenswrapper[5017]: I0122 14:18:09.325482 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hhxz6" Jan 22 14:18:09 crc kubenswrapper[5017]: I0122 14:18:09.350342 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hhxz6"] Jan 22 14:18:09 crc kubenswrapper[5017]: I0122 14:18:09.361486 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d30c0806-53b6-45be-bb69-4d025d645302-utilities\") pod \"community-operators-hhxz6\" (UID: \"d30c0806-53b6-45be-bb69-4d025d645302\") " pod="openshift-marketplace/community-operators-hhxz6" Jan 22 14:18:09 crc kubenswrapper[5017]: I0122 14:18:09.361606 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d30c0806-53b6-45be-bb69-4d025d645302-catalog-content\") pod \"community-operators-hhxz6\" (UID: \"d30c0806-53b6-45be-bb69-4d025d645302\") " pod="openshift-marketplace/community-operators-hhxz6" Jan 22 14:18:09 crc kubenswrapper[5017]: I0122 14:18:09.361656 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwd5g\" (UniqueName: \"kubernetes.io/projected/d30c0806-53b6-45be-bb69-4d025d645302-kube-api-access-cwd5g\") pod \"community-operators-hhxz6\" (UID: \"d30c0806-53b6-45be-bb69-4d025d645302\") " pod="openshift-marketplace/community-operators-hhxz6" Jan 22 14:18:09 crc kubenswrapper[5017]: I0122 14:18:09.463549 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d30c0806-53b6-45be-bb69-4d025d645302-utilities\") pod \"community-operators-hhxz6\" (UID: \"d30c0806-53b6-45be-bb69-4d025d645302\") " pod="openshift-marketplace/community-operators-hhxz6" Jan 22 14:18:09 crc kubenswrapper[5017]: I0122 14:18:09.463902 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d30c0806-53b6-45be-bb69-4d025d645302-catalog-content\") pod \"community-operators-hhxz6\" (UID: \"d30c0806-53b6-45be-bb69-4d025d645302\") " pod="openshift-marketplace/community-operators-hhxz6" Jan 22 14:18:09 crc kubenswrapper[5017]: I0122 14:18:09.464041 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwd5g\" (UniqueName: \"kubernetes.io/projected/d30c0806-53b6-45be-bb69-4d025d645302-kube-api-access-cwd5g\") pod \"community-operators-hhxz6\" (UID: \"d30c0806-53b6-45be-bb69-4d025d645302\") " pod="openshift-marketplace/community-operators-hhxz6" Jan 22 14:18:09 crc kubenswrapper[5017]: I0122 14:18:09.464260 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d30c0806-53b6-45be-bb69-4d025d645302-utilities\") pod \"community-operators-hhxz6\" (UID: \"d30c0806-53b6-45be-bb69-4d025d645302\") " pod="openshift-marketplace/community-operators-hhxz6" Jan 22 14:18:09 crc kubenswrapper[5017]: I0122 14:18:09.464821 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d30c0806-53b6-45be-bb69-4d025d645302-catalog-content\") pod \"community-operators-hhxz6\" (UID: \"d30c0806-53b6-45be-bb69-4d025d645302\") " pod="openshift-marketplace/community-operators-hhxz6" Jan 22 14:18:09 crc kubenswrapper[5017]: I0122 14:18:09.489725 5017 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-cwd5g\" (UniqueName: \"kubernetes.io/projected/d30c0806-53b6-45be-bb69-4d025d645302-kube-api-access-cwd5g\") pod \"community-operators-hhxz6\" (UID: \"d30c0806-53b6-45be-bb69-4d025d645302\") " pod="openshift-marketplace/community-operators-hhxz6" Jan 22 14:18:09 crc kubenswrapper[5017]: I0122 14:18:09.650020 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hhxz6" Jan 22 14:18:09 crc kubenswrapper[5017]: I0122 14:18:09.944200 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hhxz6"] Jan 22 14:18:10 crc kubenswrapper[5017]: I0122 14:18:10.827874 5017 generic.go:334] "Generic (PLEG): container finished" podID="d30c0806-53b6-45be-bb69-4d025d645302" containerID="18333d1c3b4058b8102cae9597ed8cad8a163a3929955ef24072f5373db98277" exitCode=0 Jan 22 14:18:10 crc kubenswrapper[5017]: I0122 14:18:10.828308 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hhxz6" event={"ID":"d30c0806-53b6-45be-bb69-4d025d645302","Type":"ContainerDied","Data":"18333d1c3b4058b8102cae9597ed8cad8a163a3929955ef24072f5373db98277"} Jan 22 14:18:10 crc kubenswrapper[5017]: I0122 14:18:10.829220 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hhxz6" event={"ID":"d30c0806-53b6-45be-bb69-4d025d645302","Type":"ContainerStarted","Data":"baacf93741e6c69904cb64090ab9f6cfdcfef355cdb53a467b4f2ff1296c39a2"} Jan 22 14:18:12 crc kubenswrapper[5017]: I0122 14:18:12.846374 5017 generic.go:334] "Generic (PLEG): container finished" podID="d30c0806-53b6-45be-bb69-4d025d645302" containerID="38e9b7048f5d59c26334df6d1be3799ac3e06d0995895930e24c3f8e73cce5e7" exitCode=0 Jan 22 14:18:12 crc kubenswrapper[5017]: I0122 14:18:12.846459 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hhxz6" event={"ID":"d30c0806-53b6-45be-bb69-4d025d645302","Type":"ContainerDied","Data":"38e9b7048f5d59c26334df6d1be3799ac3e06d0995895930e24c3f8e73cce5e7"} Jan 22 14:18:13 crc kubenswrapper[5017]: I0122 14:18:13.856501 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hhxz6" event={"ID":"d30c0806-53b6-45be-bb69-4d025d645302","Type":"ContainerStarted","Data":"abb1125d106a2b567ae6e1bb8de603635a23db9f5c1a920242f0553ef2eb4fce"} Jan 22 14:18:13 crc kubenswrapper[5017]: I0122 14:18:13.880727 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hhxz6" podStartSLOduration=2.494904541 podStartE2EDuration="4.880695912s" podCreationTimestamp="2026-01-22 14:18:09 +0000 UTC" firstStartedPulling="2026-01-22 14:18:10.831424969 +0000 UTC m=+905.823886827" lastFinishedPulling="2026-01-22 14:18:13.21721635 +0000 UTC m=+908.209678198" observedRunningTime="2026-01-22 14:18:13.879361934 +0000 UTC m=+908.871823802" watchObservedRunningTime="2026-01-22 14:18:13.880695912 +0000 UTC m=+908.873157760" Jan 22 14:18:15 crc kubenswrapper[5017]: I0122 14:18:15.649413 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bgtxx" Jan 22 14:18:19 crc kubenswrapper[5017]: I0122 14:18:19.650224 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hhxz6" Jan 22 14:18:19 crc kubenswrapper[5017]: I0122 14:18:19.650756 5017 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hhxz6" Jan 22 14:18:19 crc kubenswrapper[5017]: I0122 14:18:19.698506 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hhxz6" Jan 22 14:18:19 crc kubenswrapper[5017]: I0122 14:18:19.960187 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hhxz6" Jan 22 14:18:20 crc kubenswrapper[5017]: I0122 14:18:20.009477 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hhxz6"] Jan 22 14:18:21 crc kubenswrapper[5017]: I0122 14:18:21.933381 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hhxz6" podUID="d30c0806-53b6-45be-bb69-4d025d645302" containerName="registry-server" containerID="cri-o://abb1125d106a2b567ae6e1bb8de603635a23db9f5c1a920242f0553ef2eb4fce" gracePeriod=2 Jan 22 14:18:22 crc kubenswrapper[5017]: I0122 14:18:22.348777 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-d9lvp"] Jan 22 14:18:22 crc kubenswrapper[5017]: I0122 14:18:22.350660 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-d9lvp" Jan 22 14:18:22 crc kubenswrapper[5017]: I0122 14:18:22.365690 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-d9lvp"] Jan 22 14:18:22 crc kubenswrapper[5017]: I0122 14:18:22.368956 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/808690d6-427f-4171-b7e6-6e1fa277f227-catalog-content\") pod \"certified-operators-d9lvp\" (UID: \"808690d6-427f-4171-b7e6-6e1fa277f227\") " pod="openshift-marketplace/certified-operators-d9lvp" Jan 22 14:18:22 crc kubenswrapper[5017]: I0122 14:18:22.369086 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zhnd\" (UniqueName: \"kubernetes.io/projected/808690d6-427f-4171-b7e6-6e1fa277f227-kube-api-access-6zhnd\") pod \"certified-operators-d9lvp\" (UID: \"808690d6-427f-4171-b7e6-6e1fa277f227\") " pod="openshift-marketplace/certified-operators-d9lvp" Jan 22 14:18:22 crc kubenswrapper[5017]: I0122 14:18:22.369320 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/808690d6-427f-4171-b7e6-6e1fa277f227-utilities\") pod \"certified-operators-d9lvp\" (UID: \"808690d6-427f-4171-b7e6-6e1fa277f227\") " pod="openshift-marketplace/certified-operators-d9lvp" Jan 22 14:18:22 crc kubenswrapper[5017]: I0122 14:18:22.470629 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/808690d6-427f-4171-b7e6-6e1fa277f227-utilities\") pod \"certified-operators-d9lvp\" (UID: \"808690d6-427f-4171-b7e6-6e1fa277f227\") " pod="openshift-marketplace/certified-operators-d9lvp" Jan 22 14:18:22 crc kubenswrapper[5017]: I0122 14:18:22.471195 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/808690d6-427f-4171-b7e6-6e1fa277f227-catalog-content\") pod \"certified-operators-d9lvp\" (UID: 
\"808690d6-427f-4171-b7e6-6e1fa277f227\") " pod="openshift-marketplace/certified-operators-d9lvp" Jan 22 14:18:22 crc kubenswrapper[5017]: I0122 14:18:22.471230 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zhnd\" (UniqueName: \"kubernetes.io/projected/808690d6-427f-4171-b7e6-6e1fa277f227-kube-api-access-6zhnd\") pod \"certified-operators-d9lvp\" (UID: \"808690d6-427f-4171-b7e6-6e1fa277f227\") " pod="openshift-marketplace/certified-operators-d9lvp" Jan 22 14:18:22 crc kubenswrapper[5017]: I0122 14:18:22.471492 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/808690d6-427f-4171-b7e6-6e1fa277f227-utilities\") pod \"certified-operators-d9lvp\" (UID: \"808690d6-427f-4171-b7e6-6e1fa277f227\") " pod="openshift-marketplace/certified-operators-d9lvp" Jan 22 14:18:22 crc kubenswrapper[5017]: I0122 14:18:22.471812 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/808690d6-427f-4171-b7e6-6e1fa277f227-catalog-content\") pod \"certified-operators-d9lvp\" (UID: \"808690d6-427f-4171-b7e6-6e1fa277f227\") " pod="openshift-marketplace/certified-operators-d9lvp" Jan 22 14:18:22 crc kubenswrapper[5017]: I0122 14:18:22.494713 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zhnd\" (UniqueName: \"kubernetes.io/projected/808690d6-427f-4171-b7e6-6e1fa277f227-kube-api-access-6zhnd\") pod \"certified-operators-d9lvp\" (UID: \"808690d6-427f-4171-b7e6-6e1fa277f227\") " pod="openshift-marketplace/certified-operators-d9lvp" Jan 22 14:18:22 crc kubenswrapper[5017]: I0122 14:18:22.672352 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-d9lvp" Jan 22 14:18:22 crc kubenswrapper[5017]: I0122 14:18:22.946543 5017 generic.go:334] "Generic (PLEG): container finished" podID="d30c0806-53b6-45be-bb69-4d025d645302" containerID="abb1125d106a2b567ae6e1bb8de603635a23db9f5c1a920242f0553ef2eb4fce" exitCode=0 Jan 22 14:18:22 crc kubenswrapper[5017]: I0122 14:18:22.947006 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hhxz6" event={"ID":"d30c0806-53b6-45be-bb69-4d025d645302","Type":"ContainerDied","Data":"abb1125d106a2b567ae6e1bb8de603635a23db9f5c1a920242f0553ef2eb4fce"} Jan 22 14:18:22 crc kubenswrapper[5017]: I0122 14:18:22.969337 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-d9lvp"] Jan 22 14:18:23 crc kubenswrapper[5017]: I0122 14:18:23.520600 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hhxz6" Jan 22 14:18:23 crc kubenswrapper[5017]: I0122 14:18:23.598534 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d30c0806-53b6-45be-bb69-4d025d645302-utilities\") pod \"d30c0806-53b6-45be-bb69-4d025d645302\" (UID: \"d30c0806-53b6-45be-bb69-4d025d645302\") " Jan 22 14:18:23 crc kubenswrapper[5017]: I0122 14:18:23.598988 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cwd5g\" (UniqueName: \"kubernetes.io/projected/d30c0806-53b6-45be-bb69-4d025d645302-kube-api-access-cwd5g\") pod \"d30c0806-53b6-45be-bb69-4d025d645302\" (UID: \"d30c0806-53b6-45be-bb69-4d025d645302\") " Jan 22 14:18:23 crc kubenswrapper[5017]: I0122 14:18:23.599060 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d30c0806-53b6-45be-bb69-4d025d645302-catalog-content\") pod \"d30c0806-53b6-45be-bb69-4d025d645302\" (UID: \"d30c0806-53b6-45be-bb69-4d025d645302\") " Jan 22 14:18:23 crc kubenswrapper[5017]: I0122 14:18:23.600104 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d30c0806-53b6-45be-bb69-4d025d645302-utilities" (OuterVolumeSpecName: "utilities") pod "d30c0806-53b6-45be-bb69-4d025d645302" (UID: "d30c0806-53b6-45be-bb69-4d025d645302"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:18:23 crc kubenswrapper[5017]: I0122 14:18:23.621047 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d30c0806-53b6-45be-bb69-4d025d645302-kube-api-access-cwd5g" (OuterVolumeSpecName: "kube-api-access-cwd5g") pod "d30c0806-53b6-45be-bb69-4d025d645302" (UID: "d30c0806-53b6-45be-bb69-4d025d645302"). InnerVolumeSpecName "kube-api-access-cwd5g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:18:23 crc kubenswrapper[5017]: I0122 14:18:23.685463 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d30c0806-53b6-45be-bb69-4d025d645302-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d30c0806-53b6-45be-bb69-4d025d645302" (UID: "d30c0806-53b6-45be-bb69-4d025d645302"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:18:23 crc kubenswrapper[5017]: I0122 14:18:23.700664 5017 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d30c0806-53b6-45be-bb69-4d025d645302-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:23 crc kubenswrapper[5017]: I0122 14:18:23.700701 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cwd5g\" (UniqueName: \"kubernetes.io/projected/d30c0806-53b6-45be-bb69-4d025d645302-kube-api-access-cwd5g\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:23 crc kubenswrapper[5017]: I0122 14:18:23.700714 5017 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d30c0806-53b6-45be-bb69-4d025d645302-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:23 crc kubenswrapper[5017]: I0122 14:18:23.958102 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hhxz6" event={"ID":"d30c0806-53b6-45be-bb69-4d025d645302","Type":"ContainerDied","Data":"baacf93741e6c69904cb64090ab9f6cfdcfef355cdb53a467b4f2ff1296c39a2"} Jan 22 14:18:23 crc kubenswrapper[5017]: I0122 14:18:23.958233 5017 scope.go:117] "RemoveContainer" containerID="abb1125d106a2b567ae6e1bb8de603635a23db9f5c1a920242f0553ef2eb4fce" Jan 22 14:18:23 crc kubenswrapper[5017]: I0122 14:18:23.958152 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hhxz6" Jan 22 14:18:23 crc kubenswrapper[5017]: I0122 14:18:23.960896 5017 generic.go:334] "Generic (PLEG): container finished" podID="808690d6-427f-4171-b7e6-6e1fa277f227" containerID="1f534bd07941dfc5cc485210b6724010e5e59cfac1faffd297212684fab1bf52" exitCode=0 Jan 22 14:18:23 crc kubenswrapper[5017]: I0122 14:18:23.961004 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d9lvp" event={"ID":"808690d6-427f-4171-b7e6-6e1fa277f227","Type":"ContainerDied","Data":"1f534bd07941dfc5cc485210b6724010e5e59cfac1faffd297212684fab1bf52"} Jan 22 14:18:23 crc kubenswrapper[5017]: I0122 14:18:23.961474 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d9lvp" event={"ID":"808690d6-427f-4171-b7e6-6e1fa277f227","Type":"ContainerStarted","Data":"1a5a1c4335ebaa6ca5d6cca24b9313f0850ea2fe66d05721bb96ac4c5d1018d5"} Jan 22 14:18:23 crc kubenswrapper[5017]: I0122 14:18:23.995460 5017 scope.go:117] "RemoveContainer" containerID="38e9b7048f5d59c26334df6d1be3799ac3e06d0995895930e24c3f8e73cce5e7" Jan 22 14:18:24 crc kubenswrapper[5017]: I0122 14:18:24.021419 5017 scope.go:117] "RemoveContainer" containerID="18333d1c3b4058b8102cae9597ed8cad8a163a3929955ef24072f5373db98277" Jan 22 14:18:24 crc kubenswrapper[5017]: I0122 14:18:24.051050 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hhxz6"] Jan 22 14:18:24 crc kubenswrapper[5017]: I0122 14:18:24.057444 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hhxz6"] Jan 22 14:18:24 crc kubenswrapper[5017]: I0122 14:18:24.971797 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d9lvp" event={"ID":"808690d6-427f-4171-b7e6-6e1fa277f227","Type":"ContainerStarted","Data":"5e2aec60362e925279a66607d41def9471f41d4f0d3831a4b5dfec020e3983e2"} Jan 22 14:18:25 crc kubenswrapper[5017]: I0122 
14:18:25.277704 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d30c0806-53b6-45be-bb69-4d025d645302" path="/var/lib/kubelet/pods/d30c0806-53b6-45be-bb69-4d025d645302/volumes" Jan 22 14:18:25 crc kubenswrapper[5017]: I0122 14:18:25.980045 5017 generic.go:334] "Generic (PLEG): container finished" podID="808690d6-427f-4171-b7e6-6e1fa277f227" containerID="5e2aec60362e925279a66607d41def9471f41d4f0d3831a4b5dfec020e3983e2" exitCode=0 Jan 22 14:18:25 crc kubenswrapper[5017]: I0122 14:18:25.980140 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d9lvp" event={"ID":"808690d6-427f-4171-b7e6-6e1fa277f227","Type":"ContainerDied","Data":"5e2aec60362e925279a66607d41def9471f41d4f0d3831a4b5dfec020e3983e2"} Jan 22 14:18:26 crc kubenswrapper[5017]: I0122 14:18:26.993818 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d9lvp" event={"ID":"808690d6-427f-4171-b7e6-6e1fa277f227","Type":"ContainerStarted","Data":"31b6a45435f2fa19a6f39336f005675dde11a213a2a18222b301e720717e9977"} Jan 22 14:18:27 crc kubenswrapper[5017]: I0122 14:18:27.025138 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-d9lvp" podStartSLOduration=2.612355241 podStartE2EDuration="5.025084535s" podCreationTimestamp="2026-01-22 14:18:22 +0000 UTC" firstStartedPulling="2026-01-22 14:18:23.963175726 +0000 UTC m=+918.955637574" lastFinishedPulling="2026-01-22 14:18:26.37590501 +0000 UTC m=+921.368366868" observedRunningTime="2026-01-22 14:18:27.020381472 +0000 UTC m=+922.012843340" watchObservedRunningTime="2026-01-22 14:18:27.025084535 +0000 UTC m=+922.017546393" Jan 22 14:18:30 crc kubenswrapper[5017]: I0122 14:18:30.404442 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v"] Jan 22 14:18:30 crc kubenswrapper[5017]: E0122 14:18:30.405225 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d30c0806-53b6-45be-bb69-4d025d645302" containerName="extract-content" Jan 22 14:18:30 crc kubenswrapper[5017]: I0122 14:18:30.405240 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="d30c0806-53b6-45be-bb69-4d025d645302" containerName="extract-content" Jan 22 14:18:30 crc kubenswrapper[5017]: E0122 14:18:30.405261 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d30c0806-53b6-45be-bb69-4d025d645302" containerName="extract-utilities" Jan 22 14:18:30 crc kubenswrapper[5017]: I0122 14:18:30.405267 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="d30c0806-53b6-45be-bb69-4d025d645302" containerName="extract-utilities" Jan 22 14:18:30 crc kubenswrapper[5017]: E0122 14:18:30.405275 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d30c0806-53b6-45be-bb69-4d025d645302" containerName="registry-server" Jan 22 14:18:30 crc kubenswrapper[5017]: I0122 14:18:30.405281 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="d30c0806-53b6-45be-bb69-4d025d645302" containerName="registry-server" Jan 22 14:18:30 crc kubenswrapper[5017]: I0122 14:18:30.405397 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="d30c0806-53b6-45be-bb69-4d025d645302" containerName="registry-server" Jan 22 14:18:30 crc kubenswrapper[5017]: I0122 14:18:30.406316 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v" Jan 22 14:18:30 crc kubenswrapper[5017]: I0122 14:18:30.409056 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 22 14:18:30 crc kubenswrapper[5017]: I0122 14:18:30.416298 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v"] Jan 22 14:18:30 crc kubenswrapper[5017]: I0122 14:18:30.527781 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e5ffedf9-b8ab-434a-aaf7-bc816bec0de0-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v\" (UID: \"e5ffedf9-b8ab-434a-aaf7-bc816bec0de0\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v" Jan 22 14:18:30 crc kubenswrapper[5017]: I0122 14:18:30.527847 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9msw8\" (UniqueName: \"kubernetes.io/projected/e5ffedf9-b8ab-434a-aaf7-bc816bec0de0-kube-api-access-9msw8\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v\" (UID: \"e5ffedf9-b8ab-434a-aaf7-bc816bec0de0\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v" Jan 22 14:18:30 crc kubenswrapper[5017]: I0122 14:18:30.527886 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e5ffedf9-b8ab-434a-aaf7-bc816bec0de0-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v\" (UID: \"e5ffedf9-b8ab-434a-aaf7-bc816bec0de0\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v" Jan 22 14:18:30 crc kubenswrapper[5017]: I0122 14:18:30.629864 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e5ffedf9-b8ab-434a-aaf7-bc816bec0de0-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v\" (UID: \"e5ffedf9-b8ab-434a-aaf7-bc816bec0de0\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v" Jan 22 14:18:30 crc kubenswrapper[5017]: I0122 14:18:30.630295 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9msw8\" (UniqueName: \"kubernetes.io/projected/e5ffedf9-b8ab-434a-aaf7-bc816bec0de0-kube-api-access-9msw8\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v\" (UID: \"e5ffedf9-b8ab-434a-aaf7-bc816bec0de0\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v" Jan 22 14:18:30 crc kubenswrapper[5017]: I0122 14:18:30.630409 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e5ffedf9-b8ab-434a-aaf7-bc816bec0de0-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v\" (UID: \"e5ffedf9-b8ab-434a-aaf7-bc816bec0de0\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v" Jan 22 14:18:30 crc kubenswrapper[5017]: I0122 14:18:30.630757 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/e5ffedf9-b8ab-434a-aaf7-bc816bec0de0-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v\" (UID: \"e5ffedf9-b8ab-434a-aaf7-bc816bec0de0\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v" Jan 22 14:18:30 crc kubenswrapper[5017]: I0122 14:18:30.630911 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e5ffedf9-b8ab-434a-aaf7-bc816bec0de0-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v\" (UID: \"e5ffedf9-b8ab-434a-aaf7-bc816bec0de0\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v" Jan 22 14:18:30 crc kubenswrapper[5017]: I0122 14:18:30.652558 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9msw8\" (UniqueName: \"kubernetes.io/projected/e5ffedf9-b8ab-434a-aaf7-bc816bec0de0-kube-api-access-9msw8\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v\" (UID: \"e5ffedf9-b8ab-434a-aaf7-bc816bec0de0\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v" Jan 22 14:18:30 crc kubenswrapper[5017]: I0122 14:18:30.725129 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v" Jan 22 14:18:30 crc kubenswrapper[5017]: I0122 14:18:30.984497 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v"] Jan 22 14:18:31 crc kubenswrapper[5017]: I0122 14:18:31.020887 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v" event={"ID":"e5ffedf9-b8ab-434a-aaf7-bc816bec0de0","Type":"ContainerStarted","Data":"35b8ad21d188d829f746610e486492783310a0fe2dcf78e4c84db864b5224137"} Jan 22 14:18:31 crc kubenswrapper[5017]: I0122 14:18:31.898957 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-j4qnq" podUID="66a013c9-01a4-4027-99ec-185312c81a41" containerName="console" containerID="cri-o://f6efecb73e32e1ac970b6891877e2fed057d48569e6782d1aff86ee6c0901e06" gracePeriod=15 Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.027948 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-j4qnq_66a013c9-01a4-4027-99ec-185312c81a41/console/0.log" Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.027992 5017 generic.go:334] "Generic (PLEG): container finished" podID="66a013c9-01a4-4027-99ec-185312c81a41" containerID="f6efecb73e32e1ac970b6891877e2fed057d48569e6782d1aff86ee6c0901e06" exitCode=2 Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.028086 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-j4qnq" event={"ID":"66a013c9-01a4-4027-99ec-185312c81a41","Type":"ContainerDied","Data":"f6efecb73e32e1ac970b6891877e2fed057d48569e6782d1aff86ee6c0901e06"} Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.029763 5017 generic.go:334] "Generic (PLEG): container finished" podID="e5ffedf9-b8ab-434a-aaf7-bc816bec0de0" containerID="5259a8fcfda9dbe40274647358051343ecf176b563df62f127c6c92152cc90e7" exitCode=0 Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.029797 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v" event={"ID":"e5ffedf9-b8ab-434a-aaf7-bc816bec0de0","Type":"ContainerDied","Data":"5259a8fcfda9dbe40274647358051343ecf176b563df62f127c6c92152cc90e7"} Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.401392 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-j4qnq_66a013c9-01a4-4027-99ec-185312c81a41/console/0.log" Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.401500 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.564727 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-console-config\") pod \"66a013c9-01a4-4027-99ec-185312c81a41\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.564784 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s7lqv\" (UniqueName: \"kubernetes.io/projected/66a013c9-01a4-4027-99ec-185312c81a41-kube-api-access-s7lqv\") pod \"66a013c9-01a4-4027-99ec-185312c81a41\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.564821 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-trusted-ca-bundle\") pod \"66a013c9-01a4-4027-99ec-185312c81a41\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.564879 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/66a013c9-01a4-4027-99ec-185312c81a41-console-oauth-config\") pod \"66a013c9-01a4-4027-99ec-185312c81a41\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.564981 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/66a013c9-01a4-4027-99ec-185312c81a41-console-serving-cert\") pod \"66a013c9-01a4-4027-99ec-185312c81a41\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.565034 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-service-ca\") pod \"66a013c9-01a4-4027-99ec-185312c81a41\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.565085 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-oauth-serving-cert\") pod \"66a013c9-01a4-4027-99ec-185312c81a41\" (UID: \"66a013c9-01a4-4027-99ec-185312c81a41\") " Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.570506 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-service-ca" (OuterVolumeSpecName: "service-ca") pod "66a013c9-01a4-4027-99ec-185312c81a41" (UID: "66a013c9-01a4-4027-99ec-185312c81a41"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.570683 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "66a013c9-01a4-4027-99ec-185312c81a41" (UID: "66a013c9-01a4-4027-99ec-185312c81a41"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.570657 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-console-config" (OuterVolumeSpecName: "console-config") pod "66a013c9-01a4-4027-99ec-185312c81a41" (UID: "66a013c9-01a4-4027-99ec-185312c81a41"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.570680 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "66a013c9-01a4-4027-99ec-185312c81a41" (UID: "66a013c9-01a4-4027-99ec-185312c81a41"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.573522 5017 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-console-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.573558 5017 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.573572 5017 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.573590 5017 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/66a013c9-01a4-4027-99ec-185312c81a41-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.578290 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66a013c9-01a4-4027-99ec-185312c81a41-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "66a013c9-01a4-4027-99ec-185312c81a41" (UID: "66a013c9-01a4-4027-99ec-185312c81a41"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.588867 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66a013c9-01a4-4027-99ec-185312c81a41-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "66a013c9-01a4-4027-99ec-185312c81a41" (UID: "66a013c9-01a4-4027-99ec-185312c81a41"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.589926 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66a013c9-01a4-4027-99ec-185312c81a41-kube-api-access-s7lqv" (OuterVolumeSpecName: "kube-api-access-s7lqv") pod "66a013c9-01a4-4027-99ec-185312c81a41" (UID: "66a013c9-01a4-4027-99ec-185312c81a41"). InnerVolumeSpecName "kube-api-access-s7lqv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.673549 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-d9lvp" Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.673988 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-d9lvp" Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.674533 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s7lqv\" (UniqueName: \"kubernetes.io/projected/66a013c9-01a4-4027-99ec-185312c81a41-kube-api-access-s7lqv\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.674567 5017 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/66a013c9-01a4-4027-99ec-185312c81a41-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.674578 5017 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/66a013c9-01a4-4027-99ec-185312c81a41-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:32 crc kubenswrapper[5017]: I0122 14:18:32.720024 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-d9lvp" Jan 22 14:18:33 crc kubenswrapper[5017]: I0122 14:18:33.038178 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-j4qnq_66a013c9-01a4-4027-99ec-185312c81a41/console/0.log" Jan 22 14:18:33 crc kubenswrapper[5017]: I0122 14:18:33.038310 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-j4qnq" event={"ID":"66a013c9-01a4-4027-99ec-185312c81a41","Type":"ContainerDied","Data":"943861b9602aaab984d2a6abc8d059d47b65f6b804eceaa8fa820836fb58cc09"} Jan 22 14:18:33 crc kubenswrapper[5017]: I0122 14:18:33.038341 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-j4qnq" Jan 22 14:18:33 crc kubenswrapper[5017]: I0122 14:18:33.038399 5017 scope.go:117] "RemoveContainer" containerID="f6efecb73e32e1ac970b6891877e2fed057d48569e6782d1aff86ee6c0901e06" Jan 22 14:18:33 crc kubenswrapper[5017]: I0122 14:18:33.077457 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-j4qnq"] Jan 22 14:18:33 crc kubenswrapper[5017]: I0122 14:18:33.087014 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-d9lvp" Jan 22 14:18:33 crc kubenswrapper[5017]: I0122 14:18:33.088273 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-j4qnq"] Jan 22 14:18:33 crc kubenswrapper[5017]: I0122 14:18:33.269753 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66a013c9-01a4-4027-99ec-185312c81a41" path="/var/lib/kubelet/pods/66a013c9-01a4-4027-99ec-185312c81a41/volumes" Jan 22 14:18:35 crc kubenswrapper[5017]: I0122 14:18:35.057722 5017 generic.go:334] "Generic (PLEG): container finished" podID="e5ffedf9-b8ab-434a-aaf7-bc816bec0de0" containerID="d82053623bf1e0d35f81b66aa470c12ec258b0e353f17d69d9b56e4970f71d51" exitCode=0 Jan 22 14:18:35 crc kubenswrapper[5017]: I0122 14:18:35.057824 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v" event={"ID":"e5ffedf9-b8ab-434a-aaf7-bc816bec0de0","Type":"ContainerDied","Data":"d82053623bf1e0d35f81b66aa470c12ec258b0e353f17d69d9b56e4970f71d51"} Jan 22 14:18:35 crc kubenswrapper[5017]: I0122 14:18:35.144553 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-d9lvp"] Jan 22 14:18:36 crc kubenswrapper[5017]: I0122 14:18:36.067606 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-d9lvp" podUID="808690d6-427f-4171-b7e6-6e1fa277f227" containerName="registry-server" containerID="cri-o://31b6a45435f2fa19a6f39336f005675dde11a213a2a18222b301e720717e9977" gracePeriod=2 Jan 22 14:18:36 crc kubenswrapper[5017]: I0122 14:18:36.750817 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-d9lvp" Jan 22 14:18:36 crc kubenswrapper[5017]: I0122 14:18:36.951827 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/808690d6-427f-4171-b7e6-6e1fa277f227-catalog-content\") pod \"808690d6-427f-4171-b7e6-6e1fa277f227\" (UID: \"808690d6-427f-4171-b7e6-6e1fa277f227\") " Jan 22 14:18:36 crc kubenswrapper[5017]: I0122 14:18:36.951919 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6zhnd\" (UniqueName: \"kubernetes.io/projected/808690d6-427f-4171-b7e6-6e1fa277f227-kube-api-access-6zhnd\") pod \"808690d6-427f-4171-b7e6-6e1fa277f227\" (UID: \"808690d6-427f-4171-b7e6-6e1fa277f227\") " Jan 22 14:18:36 crc kubenswrapper[5017]: I0122 14:18:36.952064 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/808690d6-427f-4171-b7e6-6e1fa277f227-utilities\") pod \"808690d6-427f-4171-b7e6-6e1fa277f227\" (UID: \"808690d6-427f-4171-b7e6-6e1fa277f227\") " Jan 22 14:18:36 crc kubenswrapper[5017]: I0122 14:18:36.954727 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/808690d6-427f-4171-b7e6-6e1fa277f227-utilities" (OuterVolumeSpecName: "utilities") pod "808690d6-427f-4171-b7e6-6e1fa277f227" (UID: "808690d6-427f-4171-b7e6-6e1fa277f227"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:18:36 crc kubenswrapper[5017]: I0122 14:18:36.958995 5017 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/808690d6-427f-4171-b7e6-6e1fa277f227-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:36 crc kubenswrapper[5017]: I0122 14:18:36.963355 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/808690d6-427f-4171-b7e6-6e1fa277f227-kube-api-access-6zhnd" (OuterVolumeSpecName: "kube-api-access-6zhnd") pod "808690d6-427f-4171-b7e6-6e1fa277f227" (UID: "808690d6-427f-4171-b7e6-6e1fa277f227"). InnerVolumeSpecName "kube-api-access-6zhnd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:18:37 crc kubenswrapper[5017]: I0122 14:18:37.017914 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/808690d6-427f-4171-b7e6-6e1fa277f227-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "808690d6-427f-4171-b7e6-6e1fa277f227" (UID: "808690d6-427f-4171-b7e6-6e1fa277f227"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:18:37 crc kubenswrapper[5017]: I0122 14:18:37.059799 5017 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/808690d6-427f-4171-b7e6-6e1fa277f227-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:37 crc kubenswrapper[5017]: I0122 14:18:37.059839 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6zhnd\" (UniqueName: \"kubernetes.io/projected/808690d6-427f-4171-b7e6-6e1fa277f227-kube-api-access-6zhnd\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:37 crc kubenswrapper[5017]: I0122 14:18:37.076068 5017 generic.go:334] "Generic (PLEG): container finished" podID="808690d6-427f-4171-b7e6-6e1fa277f227" containerID="31b6a45435f2fa19a6f39336f005675dde11a213a2a18222b301e720717e9977" exitCode=0 Jan 22 14:18:37 crc kubenswrapper[5017]: I0122 14:18:37.076178 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d9lvp" event={"ID":"808690d6-427f-4171-b7e6-6e1fa277f227","Type":"ContainerDied","Data":"31b6a45435f2fa19a6f39336f005675dde11a213a2a18222b301e720717e9977"} Jan 22 14:18:37 crc kubenswrapper[5017]: I0122 14:18:37.076223 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d9lvp" event={"ID":"808690d6-427f-4171-b7e6-6e1fa277f227","Type":"ContainerDied","Data":"1a5a1c4335ebaa6ca5d6cca24b9313f0850ea2fe66d05721bb96ac4c5d1018d5"} Jan 22 14:18:37 crc kubenswrapper[5017]: I0122 14:18:37.076250 5017 scope.go:117] "RemoveContainer" containerID="31b6a45435f2fa19a6f39336f005675dde11a213a2a18222b301e720717e9977" Jan 22 14:18:37 crc kubenswrapper[5017]: I0122 14:18:37.076287 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-d9lvp" Jan 22 14:18:37 crc kubenswrapper[5017]: I0122 14:18:37.078808 5017 generic.go:334] "Generic (PLEG): container finished" podID="e5ffedf9-b8ab-434a-aaf7-bc816bec0de0" containerID="8b5828e71ce322c4ae72a9cd98709b49af1f2ef87582dbe5a41ad44ec9f4c077" exitCode=0 Jan 22 14:18:37 crc kubenswrapper[5017]: I0122 14:18:37.078869 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v" event={"ID":"e5ffedf9-b8ab-434a-aaf7-bc816bec0de0","Type":"ContainerDied","Data":"8b5828e71ce322c4ae72a9cd98709b49af1f2ef87582dbe5a41ad44ec9f4c077"} Jan 22 14:18:37 crc kubenswrapper[5017]: I0122 14:18:37.101297 5017 scope.go:117] "RemoveContainer" containerID="5e2aec60362e925279a66607d41def9471f41d4f0d3831a4b5dfec020e3983e2" Jan 22 14:18:37 crc kubenswrapper[5017]: I0122 14:18:37.130223 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-d9lvp"] Jan 22 14:18:37 crc kubenswrapper[5017]: I0122 14:18:37.132575 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-d9lvp"] Jan 22 14:18:37 crc kubenswrapper[5017]: I0122 14:18:37.144711 5017 scope.go:117] "RemoveContainer" containerID="1f534bd07941dfc5cc485210b6724010e5e59cfac1faffd297212684fab1bf52" Jan 22 14:18:37 crc kubenswrapper[5017]: I0122 14:18:37.160923 5017 scope.go:117] "RemoveContainer" containerID="31b6a45435f2fa19a6f39336f005675dde11a213a2a18222b301e720717e9977" Jan 22 14:18:37 crc kubenswrapper[5017]: E0122 14:18:37.162305 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31b6a45435f2fa19a6f39336f005675dde11a213a2a18222b301e720717e9977\": container with ID starting with 31b6a45435f2fa19a6f39336f005675dde11a213a2a18222b301e720717e9977 not found: ID does not exist" containerID="31b6a45435f2fa19a6f39336f005675dde11a213a2a18222b301e720717e9977" Jan 22 14:18:37 crc kubenswrapper[5017]: I0122 14:18:37.162350 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31b6a45435f2fa19a6f39336f005675dde11a213a2a18222b301e720717e9977"} err="failed to get container status \"31b6a45435f2fa19a6f39336f005675dde11a213a2a18222b301e720717e9977\": rpc error: code = NotFound desc = could not find container \"31b6a45435f2fa19a6f39336f005675dde11a213a2a18222b301e720717e9977\": container with ID starting with 31b6a45435f2fa19a6f39336f005675dde11a213a2a18222b301e720717e9977 not found: ID does not exist" Jan 22 14:18:37 crc kubenswrapper[5017]: I0122 14:18:37.162378 5017 scope.go:117] "RemoveContainer" containerID="5e2aec60362e925279a66607d41def9471f41d4f0d3831a4b5dfec020e3983e2" Jan 22 14:18:37 crc kubenswrapper[5017]: E0122 14:18:37.162920 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e2aec60362e925279a66607d41def9471f41d4f0d3831a4b5dfec020e3983e2\": container with ID starting with 5e2aec60362e925279a66607d41def9471f41d4f0d3831a4b5dfec020e3983e2 not found: ID does not exist" containerID="5e2aec60362e925279a66607d41def9471f41d4f0d3831a4b5dfec020e3983e2" Jan 22 14:18:37 crc kubenswrapper[5017]: I0122 14:18:37.162970 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e2aec60362e925279a66607d41def9471f41d4f0d3831a4b5dfec020e3983e2"} err="failed to get container status 
\"5e2aec60362e925279a66607d41def9471f41d4f0d3831a4b5dfec020e3983e2\": rpc error: code = NotFound desc = could not find container \"5e2aec60362e925279a66607d41def9471f41d4f0d3831a4b5dfec020e3983e2\": container with ID starting with 5e2aec60362e925279a66607d41def9471f41d4f0d3831a4b5dfec020e3983e2 not found: ID does not exist" Jan 22 14:18:37 crc kubenswrapper[5017]: I0122 14:18:37.162986 5017 scope.go:117] "RemoveContainer" containerID="1f534bd07941dfc5cc485210b6724010e5e59cfac1faffd297212684fab1bf52" Jan 22 14:18:37 crc kubenswrapper[5017]: E0122 14:18:37.163456 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f534bd07941dfc5cc485210b6724010e5e59cfac1faffd297212684fab1bf52\": container with ID starting with 1f534bd07941dfc5cc485210b6724010e5e59cfac1faffd297212684fab1bf52 not found: ID does not exist" containerID="1f534bd07941dfc5cc485210b6724010e5e59cfac1faffd297212684fab1bf52" Jan 22 14:18:37 crc kubenswrapper[5017]: I0122 14:18:37.163641 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f534bd07941dfc5cc485210b6724010e5e59cfac1faffd297212684fab1bf52"} err="failed to get container status \"1f534bd07941dfc5cc485210b6724010e5e59cfac1faffd297212684fab1bf52\": rpc error: code = NotFound desc = could not find container \"1f534bd07941dfc5cc485210b6724010e5e59cfac1faffd297212684fab1bf52\": container with ID starting with 1f534bd07941dfc5cc485210b6724010e5e59cfac1faffd297212684fab1bf52 not found: ID does not exist" Jan 22 14:18:37 crc kubenswrapper[5017]: I0122 14:18:37.268288 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="808690d6-427f-4171-b7e6-6e1fa277f227" path="/var/lib/kubelet/pods/808690d6-427f-4171-b7e6-6e1fa277f227/volumes" Jan 22 14:18:38 crc kubenswrapper[5017]: I0122 14:18:38.347558 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v" Jan 22 14:18:38 crc kubenswrapper[5017]: I0122 14:18:38.376970 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9msw8\" (UniqueName: \"kubernetes.io/projected/e5ffedf9-b8ab-434a-aaf7-bc816bec0de0-kube-api-access-9msw8\") pod \"e5ffedf9-b8ab-434a-aaf7-bc816bec0de0\" (UID: \"e5ffedf9-b8ab-434a-aaf7-bc816bec0de0\") " Jan 22 14:18:38 crc kubenswrapper[5017]: I0122 14:18:38.377077 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e5ffedf9-b8ab-434a-aaf7-bc816bec0de0-util\") pod \"e5ffedf9-b8ab-434a-aaf7-bc816bec0de0\" (UID: \"e5ffedf9-b8ab-434a-aaf7-bc816bec0de0\") " Jan 22 14:18:38 crc kubenswrapper[5017]: I0122 14:18:38.377159 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e5ffedf9-b8ab-434a-aaf7-bc816bec0de0-bundle\") pod \"e5ffedf9-b8ab-434a-aaf7-bc816bec0de0\" (UID: \"e5ffedf9-b8ab-434a-aaf7-bc816bec0de0\") " Jan 22 14:18:38 crc kubenswrapper[5017]: I0122 14:18:38.378432 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5ffedf9-b8ab-434a-aaf7-bc816bec0de0-bundle" (OuterVolumeSpecName: "bundle") pod "e5ffedf9-b8ab-434a-aaf7-bc816bec0de0" (UID: "e5ffedf9-b8ab-434a-aaf7-bc816bec0de0"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:18:38 crc kubenswrapper[5017]: I0122 14:18:38.382568 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5ffedf9-b8ab-434a-aaf7-bc816bec0de0-kube-api-access-9msw8" (OuterVolumeSpecName: "kube-api-access-9msw8") pod "e5ffedf9-b8ab-434a-aaf7-bc816bec0de0" (UID: "e5ffedf9-b8ab-434a-aaf7-bc816bec0de0"). InnerVolumeSpecName "kube-api-access-9msw8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:18:38 crc kubenswrapper[5017]: I0122 14:18:38.389233 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5ffedf9-b8ab-434a-aaf7-bc816bec0de0-util" (OuterVolumeSpecName: "util") pod "e5ffedf9-b8ab-434a-aaf7-bc816bec0de0" (UID: "e5ffedf9-b8ab-434a-aaf7-bc816bec0de0"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:18:38 crc kubenswrapper[5017]: I0122 14:18:38.478502 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9msw8\" (UniqueName: \"kubernetes.io/projected/e5ffedf9-b8ab-434a-aaf7-bc816bec0de0-kube-api-access-9msw8\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:38 crc kubenswrapper[5017]: I0122 14:18:38.478560 5017 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e5ffedf9-b8ab-434a-aaf7-bc816bec0de0-util\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:38 crc kubenswrapper[5017]: I0122 14:18:38.478572 5017 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e5ffedf9-b8ab-434a-aaf7-bc816bec0de0-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:39 crc kubenswrapper[5017]: I0122 14:18:39.100280 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v" event={"ID":"e5ffedf9-b8ab-434a-aaf7-bc816bec0de0","Type":"ContainerDied","Data":"35b8ad21d188d829f746610e486492783310a0fe2dcf78e4c84db864b5224137"} Jan 22 14:18:39 crc kubenswrapper[5017]: I0122 14:18:39.100337 5017 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="35b8ad21d188d829f746610e486492783310a0fe2dcf78e4c84db864b5224137" Jan 22 14:18:39 crc kubenswrapper[5017]: I0122 14:18:39.100966 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.754605 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-5579759f95-242br"] Jan 22 14:18:47 crc kubenswrapper[5017]: E0122 14:18:47.755768 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="808690d6-427f-4171-b7e6-6e1fa277f227" containerName="extract-utilities" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.755785 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="808690d6-427f-4171-b7e6-6e1fa277f227" containerName="extract-utilities" Jan 22 14:18:47 crc kubenswrapper[5017]: E0122 14:18:47.755796 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="808690d6-427f-4171-b7e6-6e1fa277f227" containerName="extract-content" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.755803 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="808690d6-427f-4171-b7e6-6e1fa277f227" containerName="extract-content" Jan 22 14:18:47 crc kubenswrapper[5017]: E0122 14:18:47.755817 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5ffedf9-b8ab-434a-aaf7-bc816bec0de0" containerName="pull" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.755824 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5ffedf9-b8ab-434a-aaf7-bc816bec0de0" containerName="pull" Jan 22 14:18:47 crc kubenswrapper[5017]: E0122 14:18:47.755834 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66a013c9-01a4-4027-99ec-185312c81a41" containerName="console" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.755841 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="66a013c9-01a4-4027-99ec-185312c81a41" containerName="console" Jan 22 14:18:47 crc kubenswrapper[5017]: E0122 14:18:47.755850 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="808690d6-427f-4171-b7e6-6e1fa277f227" containerName="registry-server" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.755856 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="808690d6-427f-4171-b7e6-6e1fa277f227" containerName="registry-server" Jan 22 14:18:47 crc kubenswrapper[5017]: E0122 14:18:47.755869 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5ffedf9-b8ab-434a-aaf7-bc816bec0de0" containerName="extract" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.755879 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5ffedf9-b8ab-434a-aaf7-bc816bec0de0" containerName="extract" Jan 22 14:18:47 crc kubenswrapper[5017]: E0122 14:18:47.755892 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5ffedf9-b8ab-434a-aaf7-bc816bec0de0" containerName="util" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.755899 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5ffedf9-b8ab-434a-aaf7-bc816bec0de0" containerName="util" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.756049 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5ffedf9-b8ab-434a-aaf7-bc816bec0de0" containerName="extract" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.756063 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="66a013c9-01a4-4027-99ec-185312c81a41" containerName="console" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.756080 5017 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="808690d6-427f-4171-b7e6-6e1fa277f227" containerName="registry-server" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.756664 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5579759f95-242br" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.760032 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.760465 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.760557 5017 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.760695 5017 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-szl8n" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.761703 5017 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.771455 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5579759f95-242br"] Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.776259 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tph4m\" (UniqueName: \"kubernetes.io/projected/ce374248-2941-4733-9baa-7b071a8a4e51-kube-api-access-tph4m\") pod \"metallb-operator-controller-manager-5579759f95-242br\" (UID: \"ce374248-2941-4733-9baa-7b071a8a4e51\") " pod="metallb-system/metallb-operator-controller-manager-5579759f95-242br" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.776330 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ce374248-2941-4733-9baa-7b071a8a4e51-webhook-cert\") pod \"metallb-operator-controller-manager-5579759f95-242br\" (UID: \"ce374248-2941-4733-9baa-7b071a8a4e51\") " pod="metallb-system/metallb-operator-controller-manager-5579759f95-242br" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.776624 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ce374248-2941-4733-9baa-7b071a8a4e51-apiservice-cert\") pod \"metallb-operator-controller-manager-5579759f95-242br\" (UID: \"ce374248-2941-4733-9baa-7b071a8a4e51\") " pod="metallb-system/metallb-operator-controller-manager-5579759f95-242br" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.878243 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tph4m\" (UniqueName: \"kubernetes.io/projected/ce374248-2941-4733-9baa-7b071a8a4e51-kube-api-access-tph4m\") pod \"metallb-operator-controller-manager-5579759f95-242br\" (UID: \"ce374248-2941-4733-9baa-7b071a8a4e51\") " pod="metallb-system/metallb-operator-controller-manager-5579759f95-242br" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.878341 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ce374248-2941-4733-9baa-7b071a8a4e51-webhook-cert\") pod \"metallb-operator-controller-manager-5579759f95-242br\" 
(UID: \"ce374248-2941-4733-9baa-7b071a8a4e51\") " pod="metallb-system/metallb-operator-controller-manager-5579759f95-242br" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.878370 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ce374248-2941-4733-9baa-7b071a8a4e51-apiservice-cert\") pod \"metallb-operator-controller-manager-5579759f95-242br\" (UID: \"ce374248-2941-4733-9baa-7b071a8a4e51\") " pod="metallb-system/metallb-operator-controller-manager-5579759f95-242br" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.887424 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ce374248-2941-4733-9baa-7b071a8a4e51-webhook-cert\") pod \"metallb-operator-controller-manager-5579759f95-242br\" (UID: \"ce374248-2941-4733-9baa-7b071a8a4e51\") " pod="metallb-system/metallb-operator-controller-manager-5579759f95-242br" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.890388 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ce374248-2941-4733-9baa-7b071a8a4e51-apiservice-cert\") pod \"metallb-operator-controller-manager-5579759f95-242br\" (UID: \"ce374248-2941-4733-9baa-7b071a8a4e51\") " pod="metallb-system/metallb-operator-controller-manager-5579759f95-242br" Jan 22 14:18:47 crc kubenswrapper[5017]: I0122 14:18:47.919038 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tph4m\" (UniqueName: \"kubernetes.io/projected/ce374248-2941-4733-9baa-7b071a8a4e51-kube-api-access-tph4m\") pod \"metallb-operator-controller-manager-5579759f95-242br\" (UID: \"ce374248-2941-4733-9baa-7b071a8a4e51\") " pod="metallb-system/metallb-operator-controller-manager-5579759f95-242br" Jan 22 14:18:48 crc kubenswrapper[5017]: I0122 14:18:48.076281 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5579759f95-242br" Jan 22 14:18:48 crc kubenswrapper[5017]: I0122 14:18:48.192697 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-f75b486cd-q4zdm"] Jan 22 14:18:48 crc kubenswrapper[5017]: I0122 14:18:48.200984 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-f75b486cd-q4zdm" Jan 22 14:18:48 crc kubenswrapper[5017]: I0122 14:18:48.202900 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-f75b486cd-q4zdm"] Jan 22 14:18:48 crc kubenswrapper[5017]: I0122 14:18:48.204430 5017 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-c4cdw" Jan 22 14:18:48 crc kubenswrapper[5017]: I0122 14:18:48.208313 5017 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 22 14:18:48 crc kubenswrapper[5017]: I0122 14:18:48.216608 5017 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Jan 22 14:18:48 crc kubenswrapper[5017]: I0122 14:18:48.286036 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/75971588-d1b6-4f3f-982f-2137be914a8f-apiservice-cert\") pod \"metallb-operator-webhook-server-f75b486cd-q4zdm\" (UID: \"75971588-d1b6-4f3f-982f-2137be914a8f\") " pod="metallb-system/metallb-operator-webhook-server-f75b486cd-q4zdm" Jan 22 14:18:48 crc kubenswrapper[5017]: I0122 14:18:48.286148 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhcjj\" (UniqueName: \"kubernetes.io/projected/75971588-d1b6-4f3f-982f-2137be914a8f-kube-api-access-rhcjj\") pod \"metallb-operator-webhook-server-f75b486cd-q4zdm\" (UID: \"75971588-d1b6-4f3f-982f-2137be914a8f\") " pod="metallb-system/metallb-operator-webhook-server-f75b486cd-q4zdm" Jan 22 14:18:48 crc kubenswrapper[5017]: I0122 14:18:48.286201 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/75971588-d1b6-4f3f-982f-2137be914a8f-webhook-cert\") pod \"metallb-operator-webhook-server-f75b486cd-q4zdm\" (UID: \"75971588-d1b6-4f3f-982f-2137be914a8f\") " pod="metallb-system/metallb-operator-webhook-server-f75b486cd-q4zdm" Jan 22 14:18:48 crc kubenswrapper[5017]: I0122 14:18:48.387460 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhcjj\" (UniqueName: \"kubernetes.io/projected/75971588-d1b6-4f3f-982f-2137be914a8f-kube-api-access-rhcjj\") pod \"metallb-operator-webhook-server-f75b486cd-q4zdm\" (UID: \"75971588-d1b6-4f3f-982f-2137be914a8f\") " pod="metallb-system/metallb-operator-webhook-server-f75b486cd-q4zdm" Jan 22 14:18:48 crc kubenswrapper[5017]: I0122 14:18:48.387578 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/75971588-d1b6-4f3f-982f-2137be914a8f-webhook-cert\") pod \"metallb-operator-webhook-server-f75b486cd-q4zdm\" (UID: \"75971588-d1b6-4f3f-982f-2137be914a8f\") " pod="metallb-system/metallb-operator-webhook-server-f75b486cd-q4zdm" Jan 22 14:18:48 crc kubenswrapper[5017]: I0122 14:18:48.387622 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/75971588-d1b6-4f3f-982f-2137be914a8f-apiservice-cert\") pod \"metallb-operator-webhook-server-f75b486cd-q4zdm\" (UID: \"75971588-d1b6-4f3f-982f-2137be914a8f\") " pod="metallb-system/metallb-operator-webhook-server-f75b486cd-q4zdm" Jan 22 14:18:48 crc kubenswrapper[5017]: I0122 14:18:48.397519 5017 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/75971588-d1b6-4f3f-982f-2137be914a8f-apiservice-cert\") pod \"metallb-operator-webhook-server-f75b486cd-q4zdm\" (UID: \"75971588-d1b6-4f3f-982f-2137be914a8f\") " pod="metallb-system/metallb-operator-webhook-server-f75b486cd-q4zdm" Jan 22 14:18:48 crc kubenswrapper[5017]: I0122 14:18:48.397688 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/75971588-d1b6-4f3f-982f-2137be914a8f-webhook-cert\") pod \"metallb-operator-webhook-server-f75b486cd-q4zdm\" (UID: \"75971588-d1b6-4f3f-982f-2137be914a8f\") " pod="metallb-system/metallb-operator-webhook-server-f75b486cd-q4zdm" Jan 22 14:18:48 crc kubenswrapper[5017]: I0122 14:18:48.411836 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhcjj\" (UniqueName: \"kubernetes.io/projected/75971588-d1b6-4f3f-982f-2137be914a8f-kube-api-access-rhcjj\") pod \"metallb-operator-webhook-server-f75b486cd-q4zdm\" (UID: \"75971588-d1b6-4f3f-982f-2137be914a8f\") " pod="metallb-system/metallb-operator-webhook-server-f75b486cd-q4zdm" Jan 22 14:18:48 crc kubenswrapper[5017]: I0122 14:18:48.520032 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-f75b486cd-q4zdm" Jan 22 14:18:48 crc kubenswrapper[5017]: I0122 14:18:48.646861 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5579759f95-242br"] Jan 22 14:18:48 crc kubenswrapper[5017]: W0122 14:18:48.661970 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce374248_2941_4733_9baa_7b071a8a4e51.slice/crio-c2790ecfddda5807e33589badb6a2e8777bad457ca9a8900307ceae516839870 WatchSource:0}: Error finding container c2790ecfddda5807e33589badb6a2e8777bad457ca9a8900307ceae516839870: Status 404 returned error can't find the container with id c2790ecfddda5807e33589badb6a2e8777bad457ca9a8900307ceae516839870 Jan 22 14:18:49 crc kubenswrapper[5017]: I0122 14:18:49.024638 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-f75b486cd-q4zdm"] Jan 22 14:18:49 crc kubenswrapper[5017]: W0122 14:18:49.032945 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75971588_d1b6_4f3f_982f_2137be914a8f.slice/crio-86e59328424dce4ee0b397069f0c47fc0fce5b363aa1533fa84e355f710323bf WatchSource:0}: Error finding container 86e59328424dce4ee0b397069f0c47fc0fce5b363aa1533fa84e355f710323bf: Status 404 returned error can't find the container with id 86e59328424dce4ee0b397069f0c47fc0fce5b363aa1533fa84e355f710323bf Jan 22 14:18:49 crc kubenswrapper[5017]: I0122 14:18:49.171262 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5579759f95-242br" event={"ID":"ce374248-2941-4733-9baa-7b071a8a4e51","Type":"ContainerStarted","Data":"c2790ecfddda5807e33589badb6a2e8777bad457ca9a8900307ceae516839870"} Jan 22 14:18:49 crc kubenswrapper[5017]: I0122 14:18:49.173029 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-f75b486cd-q4zdm" 
event={"ID":"75971588-d1b6-4f3f-982f-2137be914a8f","Type":"ContainerStarted","Data":"86e59328424dce4ee0b397069f0c47fc0fce5b363aa1533fa84e355f710323bf"} Jan 22 14:18:55 crc kubenswrapper[5017]: I0122 14:18:55.763680 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qrlsh"] Jan 22 14:18:55 crc kubenswrapper[5017]: I0122 14:18:55.773901 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qrlsh" Jan 22 14:18:55 crc kubenswrapper[5017]: I0122 14:18:55.948251 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/210085bb-2158-42c3-a9b9-55e295548c26-utilities\") pod \"redhat-marketplace-qrlsh\" (UID: \"210085bb-2158-42c3-a9b9-55e295548c26\") " pod="openshift-marketplace/redhat-marketplace-qrlsh" Jan 22 14:18:55 crc kubenswrapper[5017]: I0122 14:18:55.948378 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/210085bb-2158-42c3-a9b9-55e295548c26-catalog-content\") pod \"redhat-marketplace-qrlsh\" (UID: \"210085bb-2158-42c3-a9b9-55e295548c26\") " pod="openshift-marketplace/redhat-marketplace-qrlsh" Jan 22 14:18:55 crc kubenswrapper[5017]: I0122 14:18:55.948469 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzxwl\" (UniqueName: \"kubernetes.io/projected/210085bb-2158-42c3-a9b9-55e295548c26-kube-api-access-gzxwl\") pod \"redhat-marketplace-qrlsh\" (UID: \"210085bb-2158-42c3-a9b9-55e295548c26\") " pod="openshift-marketplace/redhat-marketplace-qrlsh" Jan 22 14:18:55 crc kubenswrapper[5017]: I0122 14:18:55.965497 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qrlsh"] Jan 22 14:18:56 crc kubenswrapper[5017]: I0122 14:18:56.050444 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/210085bb-2158-42c3-a9b9-55e295548c26-catalog-content\") pod \"redhat-marketplace-qrlsh\" (UID: \"210085bb-2158-42c3-a9b9-55e295548c26\") " pod="openshift-marketplace/redhat-marketplace-qrlsh" Jan 22 14:18:56 crc kubenswrapper[5017]: I0122 14:18:56.050549 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzxwl\" (UniqueName: \"kubernetes.io/projected/210085bb-2158-42c3-a9b9-55e295548c26-kube-api-access-gzxwl\") pod \"redhat-marketplace-qrlsh\" (UID: \"210085bb-2158-42c3-a9b9-55e295548c26\") " pod="openshift-marketplace/redhat-marketplace-qrlsh" Jan 22 14:18:56 crc kubenswrapper[5017]: I0122 14:18:56.050607 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/210085bb-2158-42c3-a9b9-55e295548c26-utilities\") pod \"redhat-marketplace-qrlsh\" (UID: \"210085bb-2158-42c3-a9b9-55e295548c26\") " pod="openshift-marketplace/redhat-marketplace-qrlsh" Jan 22 14:18:56 crc kubenswrapper[5017]: I0122 14:18:56.051216 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/210085bb-2158-42c3-a9b9-55e295548c26-catalog-content\") pod \"redhat-marketplace-qrlsh\" (UID: \"210085bb-2158-42c3-a9b9-55e295548c26\") " pod="openshift-marketplace/redhat-marketplace-qrlsh" Jan 22 14:18:56 crc kubenswrapper[5017]: I0122 
14:18:56.051316 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/210085bb-2158-42c3-a9b9-55e295548c26-utilities\") pod \"redhat-marketplace-qrlsh\" (UID: \"210085bb-2158-42c3-a9b9-55e295548c26\") " pod="openshift-marketplace/redhat-marketplace-qrlsh" Jan 22 14:18:56 crc kubenswrapper[5017]: I0122 14:18:56.076745 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzxwl\" (UniqueName: \"kubernetes.io/projected/210085bb-2158-42c3-a9b9-55e295548c26-kube-api-access-gzxwl\") pod \"redhat-marketplace-qrlsh\" (UID: \"210085bb-2158-42c3-a9b9-55e295548c26\") " pod="openshift-marketplace/redhat-marketplace-qrlsh" Jan 22 14:18:56 crc kubenswrapper[5017]: I0122 14:18:56.136049 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qrlsh" Jan 22 14:18:56 crc kubenswrapper[5017]: I0122 14:18:56.814026 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qrlsh"] Jan 22 14:18:57 crc kubenswrapper[5017]: I0122 14:18:57.249724 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5579759f95-242br" event={"ID":"ce374248-2941-4733-9baa-7b071a8a4e51","Type":"ContainerStarted","Data":"88cdf484ab9c1e2e94e8da52621856b0302117b481ef3f837ebcca1738c87770"} Jan 22 14:18:57 crc kubenswrapper[5017]: I0122 14:18:57.250220 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-5579759f95-242br" Jan 22 14:18:57 crc kubenswrapper[5017]: I0122 14:18:57.251586 5017 generic.go:334] "Generic (PLEG): container finished" podID="210085bb-2158-42c3-a9b9-55e295548c26" containerID="e6e3912bf159ba5d104af9178c2b16101fd80f0d305d90967124a797d7a71bd6" exitCode=0 Jan 22 14:18:57 crc kubenswrapper[5017]: I0122 14:18:57.251668 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qrlsh" event={"ID":"210085bb-2158-42c3-a9b9-55e295548c26","Type":"ContainerDied","Data":"e6e3912bf159ba5d104af9178c2b16101fd80f0d305d90967124a797d7a71bd6"} Jan 22 14:18:57 crc kubenswrapper[5017]: I0122 14:18:57.251700 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qrlsh" event={"ID":"210085bb-2158-42c3-a9b9-55e295548c26","Type":"ContainerStarted","Data":"fb170ed501fe93deda04e8cfa00b5cec420ef3bff039f9dc9c76ea7e5123aa70"} Jan 22 14:18:57 crc kubenswrapper[5017]: I0122 14:18:57.270035 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-f75b486cd-q4zdm" event={"ID":"75971588-d1b6-4f3f-982f-2137be914a8f","Type":"ContainerStarted","Data":"1ee2b506149f51f0ea3cd39accbf39a1be937a38f6db737ea873158d4cba25ae"} Jan 22 14:18:57 crc kubenswrapper[5017]: I0122 14:18:57.270106 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-f75b486cd-q4zdm" Jan 22 14:18:57 crc kubenswrapper[5017]: I0122 14:18:57.285680 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-5579759f95-242br" podStartSLOduration=2.606556575 podStartE2EDuration="10.285645564s" podCreationTimestamp="2026-01-22 14:18:47 +0000 UTC" firstStartedPulling="2026-01-22 14:18:48.664095005 +0000 UTC m=+943.656556863" lastFinishedPulling="2026-01-22 14:18:56.343183994 +0000 UTC 
m=+951.335645852" observedRunningTime="2026-01-22 14:18:57.279559711 +0000 UTC m=+952.272021569" watchObservedRunningTime="2026-01-22 14:18:57.285645564 +0000 UTC m=+952.278107422" Jan 22 14:18:57 crc kubenswrapper[5017]: I0122 14:18:57.354892 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-f75b486cd-q4zdm" podStartSLOduration=1.9894223100000001 podStartE2EDuration="9.354863376s" podCreationTimestamp="2026-01-22 14:18:48 +0000 UTC" firstStartedPulling="2026-01-22 14:18:49.036492973 +0000 UTC m=+944.028954831" lastFinishedPulling="2026-01-22 14:18:56.401934039 +0000 UTC m=+951.394395897" observedRunningTime="2026-01-22 14:18:57.35007908 +0000 UTC m=+952.342540948" watchObservedRunningTime="2026-01-22 14:18:57.354863376 +0000 UTC m=+952.347325234" Jan 22 14:18:58 crc kubenswrapper[5017]: I0122 14:18:58.272280 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qrlsh" event={"ID":"210085bb-2158-42c3-a9b9-55e295548c26","Type":"ContainerStarted","Data":"859a3380214d373a833508c59d9e17e1835c5eecb2700ae0e72a5ba326ab4ae4"} Jan 22 14:18:59 crc kubenswrapper[5017]: I0122 14:18:59.300711 5017 generic.go:334] "Generic (PLEG): container finished" podID="210085bb-2158-42c3-a9b9-55e295548c26" containerID="859a3380214d373a833508c59d9e17e1835c5eecb2700ae0e72a5ba326ab4ae4" exitCode=0 Jan 22 14:18:59 crc kubenswrapper[5017]: I0122 14:18:59.301023 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qrlsh" event={"ID":"210085bb-2158-42c3-a9b9-55e295548c26","Type":"ContainerDied","Data":"859a3380214d373a833508c59d9e17e1835c5eecb2700ae0e72a5ba326ab4ae4"} Jan 22 14:19:01 crc kubenswrapper[5017]: I0122 14:19:01.319087 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qrlsh" event={"ID":"210085bb-2158-42c3-a9b9-55e295548c26","Type":"ContainerStarted","Data":"fb16e5ee0a843bd881034912603c658b820409145e7b60b9981e2805dc210d58"} Jan 22 14:19:01 crc kubenswrapper[5017]: I0122 14:19:01.343712 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qrlsh" podStartSLOduration=3.437977884 podStartE2EDuration="6.343679133s" podCreationTimestamp="2026-01-22 14:18:55 +0000 UTC" firstStartedPulling="2026-01-22 14:18:57.253392799 +0000 UTC m=+952.245854657" lastFinishedPulling="2026-01-22 14:19:00.159094038 +0000 UTC m=+955.151555906" observedRunningTime="2026-01-22 14:19:01.33794994 +0000 UTC m=+956.330411798" watchObservedRunningTime="2026-01-22 14:19:01.343679133 +0000 UTC m=+956.336140991" Jan 22 14:19:06 crc kubenswrapper[5017]: I0122 14:19:06.136454 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qrlsh" Jan 22 14:19:06 crc kubenswrapper[5017]: I0122 14:19:06.137539 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qrlsh" Jan 22 14:19:06 crc kubenswrapper[5017]: I0122 14:19:06.188376 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qrlsh" Jan 22 14:19:06 crc kubenswrapper[5017]: I0122 14:19:06.418229 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qrlsh" Jan 22 14:19:07 crc kubenswrapper[5017]: I0122 14:19:07.345860 5017 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-marketplace/redhat-marketplace-qrlsh"] Jan 22 14:19:08 crc kubenswrapper[5017]: I0122 14:19:08.368929 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qrlsh" podUID="210085bb-2158-42c3-a9b9-55e295548c26" containerName="registry-server" containerID="cri-o://fb16e5ee0a843bd881034912603c658b820409145e7b60b9981e2805dc210d58" gracePeriod=2 Jan 22 14:19:08 crc kubenswrapper[5017]: I0122 14:19:08.528826 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-f75b486cd-q4zdm" Jan 22 14:19:08 crc kubenswrapper[5017]: I0122 14:19:08.870404 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qrlsh" Jan 22 14:19:08 crc kubenswrapper[5017]: I0122 14:19:08.950473 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzxwl\" (UniqueName: \"kubernetes.io/projected/210085bb-2158-42c3-a9b9-55e295548c26-kube-api-access-gzxwl\") pod \"210085bb-2158-42c3-a9b9-55e295548c26\" (UID: \"210085bb-2158-42c3-a9b9-55e295548c26\") " Jan 22 14:19:08 crc kubenswrapper[5017]: I0122 14:19:08.950671 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/210085bb-2158-42c3-a9b9-55e295548c26-utilities\") pod \"210085bb-2158-42c3-a9b9-55e295548c26\" (UID: \"210085bb-2158-42c3-a9b9-55e295548c26\") " Jan 22 14:19:08 crc kubenswrapper[5017]: I0122 14:19:08.950833 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/210085bb-2158-42c3-a9b9-55e295548c26-catalog-content\") pod \"210085bb-2158-42c3-a9b9-55e295548c26\" (UID: \"210085bb-2158-42c3-a9b9-55e295548c26\") " Jan 22 14:19:08 crc kubenswrapper[5017]: I0122 14:19:08.951780 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/210085bb-2158-42c3-a9b9-55e295548c26-utilities" (OuterVolumeSpecName: "utilities") pod "210085bb-2158-42c3-a9b9-55e295548c26" (UID: "210085bb-2158-42c3-a9b9-55e295548c26"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:19:08 crc kubenswrapper[5017]: I0122 14:19:08.958505 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210085bb-2158-42c3-a9b9-55e295548c26-kube-api-access-gzxwl" (OuterVolumeSpecName: "kube-api-access-gzxwl") pod "210085bb-2158-42c3-a9b9-55e295548c26" (UID: "210085bb-2158-42c3-a9b9-55e295548c26"). InnerVolumeSpecName "kube-api-access-gzxwl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:09 crc kubenswrapper[5017]: I0122 14:19:09.052036 5017 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/210085bb-2158-42c3-a9b9-55e295548c26-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:09 crc kubenswrapper[5017]: I0122 14:19:09.052084 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzxwl\" (UniqueName: \"kubernetes.io/projected/210085bb-2158-42c3-a9b9-55e295548c26-kube-api-access-gzxwl\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:09 crc kubenswrapper[5017]: I0122 14:19:09.377084 5017 generic.go:334] "Generic (PLEG): container finished" podID="210085bb-2158-42c3-a9b9-55e295548c26" containerID="fb16e5ee0a843bd881034912603c658b820409145e7b60b9981e2805dc210d58" exitCode=0 Jan 22 14:19:09 crc kubenswrapper[5017]: I0122 14:19:09.377192 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qrlsh" Jan 22 14:19:09 crc kubenswrapper[5017]: I0122 14:19:09.377171 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qrlsh" event={"ID":"210085bb-2158-42c3-a9b9-55e295548c26","Type":"ContainerDied","Data":"fb16e5ee0a843bd881034912603c658b820409145e7b60b9981e2805dc210d58"} Jan 22 14:19:09 crc kubenswrapper[5017]: I0122 14:19:09.377798 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qrlsh" event={"ID":"210085bb-2158-42c3-a9b9-55e295548c26","Type":"ContainerDied","Data":"fb170ed501fe93deda04e8cfa00b5cec420ef3bff039f9dc9c76ea7e5123aa70"} Jan 22 14:19:09 crc kubenswrapper[5017]: I0122 14:19:09.377827 5017 scope.go:117] "RemoveContainer" containerID="fb16e5ee0a843bd881034912603c658b820409145e7b60b9981e2805dc210d58" Jan 22 14:19:09 crc kubenswrapper[5017]: I0122 14:19:09.400447 5017 scope.go:117] "RemoveContainer" containerID="859a3380214d373a833508c59d9e17e1835c5eecb2700ae0e72a5ba326ab4ae4" Jan 22 14:19:09 crc kubenswrapper[5017]: I0122 14:19:09.416264 5017 scope.go:117] "RemoveContainer" containerID="e6e3912bf159ba5d104af9178c2b16101fd80f0d305d90967124a797d7a71bd6" Jan 22 14:19:09 crc kubenswrapper[5017]: I0122 14:19:09.435715 5017 scope.go:117] "RemoveContainer" containerID="fb16e5ee0a843bd881034912603c658b820409145e7b60b9981e2805dc210d58" Jan 22 14:19:09 crc kubenswrapper[5017]: E0122 14:19:09.436243 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb16e5ee0a843bd881034912603c658b820409145e7b60b9981e2805dc210d58\": container with ID starting with fb16e5ee0a843bd881034912603c658b820409145e7b60b9981e2805dc210d58 not found: ID does not exist" containerID="fb16e5ee0a843bd881034912603c658b820409145e7b60b9981e2805dc210d58" Jan 22 14:19:09 crc kubenswrapper[5017]: I0122 14:19:09.436280 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb16e5ee0a843bd881034912603c658b820409145e7b60b9981e2805dc210d58"} err="failed to get container status \"fb16e5ee0a843bd881034912603c658b820409145e7b60b9981e2805dc210d58\": rpc error: code = NotFound desc = could not find container \"fb16e5ee0a843bd881034912603c658b820409145e7b60b9981e2805dc210d58\": container with ID starting with fb16e5ee0a843bd881034912603c658b820409145e7b60b9981e2805dc210d58 not found: ID does not exist" Jan 22 14:19:09 crc kubenswrapper[5017]: I0122 14:19:09.436309 5017 scope.go:117] 
"RemoveContainer" containerID="859a3380214d373a833508c59d9e17e1835c5eecb2700ae0e72a5ba326ab4ae4" Jan 22 14:19:09 crc kubenswrapper[5017]: E0122 14:19:09.436830 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"859a3380214d373a833508c59d9e17e1835c5eecb2700ae0e72a5ba326ab4ae4\": container with ID starting with 859a3380214d373a833508c59d9e17e1835c5eecb2700ae0e72a5ba326ab4ae4 not found: ID does not exist" containerID="859a3380214d373a833508c59d9e17e1835c5eecb2700ae0e72a5ba326ab4ae4" Jan 22 14:19:09 crc kubenswrapper[5017]: I0122 14:19:09.436914 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"859a3380214d373a833508c59d9e17e1835c5eecb2700ae0e72a5ba326ab4ae4"} err="failed to get container status \"859a3380214d373a833508c59d9e17e1835c5eecb2700ae0e72a5ba326ab4ae4\": rpc error: code = NotFound desc = could not find container \"859a3380214d373a833508c59d9e17e1835c5eecb2700ae0e72a5ba326ab4ae4\": container with ID starting with 859a3380214d373a833508c59d9e17e1835c5eecb2700ae0e72a5ba326ab4ae4 not found: ID does not exist" Jan 22 14:19:09 crc kubenswrapper[5017]: I0122 14:19:09.436957 5017 scope.go:117] "RemoveContainer" containerID="e6e3912bf159ba5d104af9178c2b16101fd80f0d305d90967124a797d7a71bd6" Jan 22 14:19:09 crc kubenswrapper[5017]: E0122 14:19:09.437635 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6e3912bf159ba5d104af9178c2b16101fd80f0d305d90967124a797d7a71bd6\": container with ID starting with e6e3912bf159ba5d104af9178c2b16101fd80f0d305d90967124a797d7a71bd6 not found: ID does not exist" containerID="e6e3912bf159ba5d104af9178c2b16101fd80f0d305d90967124a797d7a71bd6" Jan 22 14:19:09 crc kubenswrapper[5017]: I0122 14:19:09.437692 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6e3912bf159ba5d104af9178c2b16101fd80f0d305d90967124a797d7a71bd6"} err="failed to get container status \"e6e3912bf159ba5d104af9178c2b16101fd80f0d305d90967124a797d7a71bd6\": rpc error: code = NotFound desc = could not find container \"e6e3912bf159ba5d104af9178c2b16101fd80f0d305d90967124a797d7a71bd6\": container with ID starting with e6e3912bf159ba5d104af9178c2b16101fd80f0d305d90967124a797d7a71bd6 not found: ID does not exist" Jan 22 14:19:09 crc kubenswrapper[5017]: I0122 14:19:09.904095 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/210085bb-2158-42c3-a9b9-55e295548c26-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "210085bb-2158-42c3-a9b9-55e295548c26" (UID: "210085bb-2158-42c3-a9b9-55e295548c26"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:19:09 crc kubenswrapper[5017]: I0122 14:19:09.967557 5017 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/210085bb-2158-42c3-a9b9-55e295548c26-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:10 crc kubenswrapper[5017]: I0122 14:19:10.012621 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qrlsh"] Jan 22 14:19:10 crc kubenswrapper[5017]: I0122 14:19:10.017710 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qrlsh"] Jan 22 14:19:11 crc kubenswrapper[5017]: I0122 14:19:11.266841 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210085bb-2158-42c3-a9b9-55e295548c26" path="/var/lib/kubelet/pods/210085bb-2158-42c3-a9b9-55e295548c26/volumes" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.081033 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-5579759f95-242br" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.805759 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-p22kj"] Jan 22 14:19:28 crc kubenswrapper[5017]: E0122 14:19:28.806548 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="210085bb-2158-42c3-a9b9-55e295548c26" containerName="registry-server" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.806566 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="210085bb-2158-42c3-a9b9-55e295548c26" containerName="registry-server" Jan 22 14:19:28 crc kubenswrapper[5017]: E0122 14:19:28.806585 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="210085bb-2158-42c3-a9b9-55e295548c26" containerName="extract-utilities" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.806591 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="210085bb-2158-42c3-a9b9-55e295548c26" containerName="extract-utilities" Jan 22 14:19:28 crc kubenswrapper[5017]: E0122 14:19:28.806600 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="210085bb-2158-42c3-a9b9-55e295548c26" containerName="extract-content" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.806609 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="210085bb-2158-42c3-a9b9-55e295548c26" containerName="extract-content" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.806761 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="210085bb-2158-42c3-a9b9-55e295548c26" containerName="registry-server" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.810166 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.813213 5017 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-jcrx6" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.813563 5017 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.813983 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.821024 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-tc22d"] Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.822101 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-tc22d" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.827347 5017 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.836048 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-tc22d"] Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.891229 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/6b94cba9-db08-403c-acd4-892f04863fa2-frr-startup\") pod \"frr-k8s-p22kj\" (UID: \"6b94cba9-db08-403c-acd4-892f04863fa2\") " pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.891324 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/6b94cba9-db08-403c-acd4-892f04863fa2-metrics\") pod \"frr-k8s-p22kj\" (UID: \"6b94cba9-db08-403c-acd4-892f04863fa2\") " pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.891592 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6b94cba9-db08-403c-acd4-892f04863fa2-metrics-certs\") pod \"frr-k8s-p22kj\" (UID: \"6b94cba9-db08-403c-acd4-892f04863fa2\") " pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.891672 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zz6md\" (UniqueName: \"kubernetes.io/projected/d9d4ff3a-c59f-4b96-a00e-832089e6b4f9-kube-api-access-zz6md\") pod \"frr-k8s-webhook-server-7df86c4f6c-tc22d\" (UID: \"d9d4ff3a-c59f-4b96-a00e-832089e6b4f9\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-tc22d" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.891743 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/6b94cba9-db08-403c-acd4-892f04863fa2-frr-conf\") pod \"frr-k8s-p22kj\" (UID: \"6b94cba9-db08-403c-acd4-892f04863fa2\") " pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.891828 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46rsc\" (UniqueName: \"kubernetes.io/projected/6b94cba9-db08-403c-acd4-892f04863fa2-kube-api-access-46rsc\") pod 
\"frr-k8s-p22kj\" (UID: \"6b94cba9-db08-403c-acd4-892f04863fa2\") " pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.891865 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/6b94cba9-db08-403c-acd4-892f04863fa2-frr-sockets\") pod \"frr-k8s-p22kj\" (UID: \"6b94cba9-db08-403c-acd4-892f04863fa2\") " pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.891885 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d9d4ff3a-c59f-4b96-a00e-832089e6b4f9-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-tc22d\" (UID: \"d9d4ff3a-c59f-4b96-a00e-832089e6b4f9\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-tc22d" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.891905 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/6b94cba9-db08-403c-acd4-892f04863fa2-reloader\") pod \"frr-k8s-p22kj\" (UID: \"6b94cba9-db08-403c-acd4-892f04863fa2\") " pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.916161 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-xsq75"] Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.917702 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-xsq75" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.923300 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.923339 5017 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-qv9rz" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.923434 5017 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.923527 5017 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.929949 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6968d8fdc4-6j6zr"] Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.931077 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-6968d8fdc4-6j6zr" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.935278 5017 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.968207 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-6j6zr"] Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.993417 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/6b94cba9-db08-403c-acd4-892f04863fa2-frr-conf\") pod \"frr-k8s-p22kj\" (UID: \"6b94cba9-db08-403c-acd4-892f04863fa2\") " pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.993484 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46rsc\" (UniqueName: \"kubernetes.io/projected/6b94cba9-db08-403c-acd4-892f04863fa2-kube-api-access-46rsc\") pod \"frr-k8s-p22kj\" (UID: \"6b94cba9-db08-403c-acd4-892f04863fa2\") " pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.993512 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/6b94cba9-db08-403c-acd4-892f04863fa2-frr-sockets\") pod \"frr-k8s-p22kj\" (UID: \"6b94cba9-db08-403c-acd4-892f04863fa2\") " pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.993529 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d9d4ff3a-c59f-4b96-a00e-832089e6b4f9-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-tc22d\" (UID: \"d9d4ff3a-c59f-4b96-a00e-832089e6b4f9\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-tc22d" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.993547 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/6b94cba9-db08-403c-acd4-892f04863fa2-reloader\") pod \"frr-k8s-p22kj\" (UID: \"6b94cba9-db08-403c-acd4-892f04863fa2\") " pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.993569 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/44f57db2-6ae9-42c4-bf1e-12a301657172-metrics-certs\") pod \"speaker-xsq75\" (UID: \"44f57db2-6ae9-42c4-bf1e-12a301657172\") " pod="metallb-system/speaker-xsq75" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.993613 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w95xz\" (UniqueName: \"kubernetes.io/projected/6423026b-4d48-4bac-8a25-3c8fb0d89bbf-kube-api-access-w95xz\") pod \"controller-6968d8fdc4-6j6zr\" (UID: \"6423026b-4d48-4bac-8a25-3c8fb0d89bbf\") " pod="metallb-system/controller-6968d8fdc4-6j6zr" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.993642 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6423026b-4d48-4bac-8a25-3c8fb0d89bbf-metrics-certs\") pod \"controller-6968d8fdc4-6j6zr\" (UID: \"6423026b-4d48-4bac-8a25-3c8fb0d89bbf\") " pod="metallb-system/controller-6968d8fdc4-6j6zr" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.993666 5017 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/6b94cba9-db08-403c-acd4-892f04863fa2-frr-startup\") pod \"frr-k8s-p22kj\" (UID: \"6b94cba9-db08-403c-acd4-892f04863fa2\") " pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.993692 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6423026b-4d48-4bac-8a25-3c8fb0d89bbf-cert\") pod \"controller-6968d8fdc4-6j6zr\" (UID: \"6423026b-4d48-4bac-8a25-3c8fb0d89bbf\") " pod="metallb-system/controller-6968d8fdc4-6j6zr" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.993727 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/44f57db2-6ae9-42c4-bf1e-12a301657172-memberlist\") pod \"speaker-xsq75\" (UID: \"44f57db2-6ae9-42c4-bf1e-12a301657172\") " pod="metallb-system/speaker-xsq75" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.993749 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7zp7\" (UniqueName: \"kubernetes.io/projected/44f57db2-6ae9-42c4-bf1e-12a301657172-kube-api-access-r7zp7\") pod \"speaker-xsq75\" (UID: \"44f57db2-6ae9-42c4-bf1e-12a301657172\") " pod="metallb-system/speaker-xsq75" Jan 22 14:19:28 crc kubenswrapper[5017]: E0122 14:19:28.993861 5017 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Jan 22 14:19:28 crc kubenswrapper[5017]: E0122 14:19:28.993985 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d9d4ff3a-c59f-4b96-a00e-832089e6b4f9-cert podName:d9d4ff3a-c59f-4b96-a00e-832089e6b4f9 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:29.493957986 +0000 UTC m=+984.486420054 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d9d4ff3a-c59f-4b96-a00e-832089e6b4f9-cert") pod "frr-k8s-webhook-server-7df86c4f6c-tc22d" (UID: "d9d4ff3a-c59f-4b96-a00e-832089e6b4f9") : secret "frr-k8s-webhook-server-cert" not found Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.994007 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/6b94cba9-db08-403c-acd4-892f04863fa2-frr-conf\") pod \"frr-k8s-p22kj\" (UID: \"6b94cba9-db08-403c-acd4-892f04863fa2\") " pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.994015 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/6b94cba9-db08-403c-acd4-892f04863fa2-metrics\") pod \"frr-k8s-p22kj\" (UID: \"6b94cba9-db08-403c-acd4-892f04863fa2\") " pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.994090 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/44f57db2-6ae9-42c4-bf1e-12a301657172-metallb-excludel2\") pod \"speaker-xsq75\" (UID: \"44f57db2-6ae9-42c4-bf1e-12a301657172\") " pod="metallb-system/speaker-xsq75" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.994100 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/6b94cba9-db08-403c-acd4-892f04863fa2-reloader\") pod \"frr-k8s-p22kj\" (UID: \"6b94cba9-db08-403c-acd4-892f04863fa2\") " pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.994151 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6b94cba9-db08-403c-acd4-892f04863fa2-metrics-certs\") pod \"frr-k8s-p22kj\" (UID: \"6b94cba9-db08-403c-acd4-892f04863fa2\") " pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.994183 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zz6md\" (UniqueName: \"kubernetes.io/projected/d9d4ff3a-c59f-4b96-a00e-832089e6b4f9-kube-api-access-zz6md\") pod \"frr-k8s-webhook-server-7df86c4f6c-tc22d\" (UID: \"d9d4ff3a-c59f-4b96-a00e-832089e6b4f9\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-tc22d" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.994404 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/6b94cba9-db08-403c-acd4-892f04863fa2-metrics\") pod \"frr-k8s-p22kj\" (UID: \"6b94cba9-db08-403c-acd4-892f04863fa2\") " pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.994462 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/6b94cba9-db08-403c-acd4-892f04863fa2-frr-sockets\") pod \"frr-k8s-p22kj\" (UID: \"6b94cba9-db08-403c-acd4-892f04863fa2\") " pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:28 crc kubenswrapper[5017]: I0122 14:19:28.995073 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/6b94cba9-db08-403c-acd4-892f04863fa2-frr-startup\") pod \"frr-k8s-p22kj\" (UID: \"6b94cba9-db08-403c-acd4-892f04863fa2\") " pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:29 crc 
kubenswrapper[5017]: I0122 14:19:29.001499 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6b94cba9-db08-403c-acd4-892f04863fa2-metrics-certs\") pod \"frr-k8s-p22kj\" (UID: \"6b94cba9-db08-403c-acd4-892f04863fa2\") " pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.015095 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zz6md\" (UniqueName: \"kubernetes.io/projected/d9d4ff3a-c59f-4b96-a00e-832089e6b4f9-kube-api-access-zz6md\") pod \"frr-k8s-webhook-server-7df86c4f6c-tc22d\" (UID: \"d9d4ff3a-c59f-4b96-a00e-832089e6b4f9\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-tc22d" Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.016216 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46rsc\" (UniqueName: \"kubernetes.io/projected/6b94cba9-db08-403c-acd4-892f04863fa2-kube-api-access-46rsc\") pod \"frr-k8s-p22kj\" (UID: \"6b94cba9-db08-403c-acd4-892f04863fa2\") " pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.095184 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w95xz\" (UniqueName: \"kubernetes.io/projected/6423026b-4d48-4bac-8a25-3c8fb0d89bbf-kube-api-access-w95xz\") pod \"controller-6968d8fdc4-6j6zr\" (UID: \"6423026b-4d48-4bac-8a25-3c8fb0d89bbf\") " pod="metallb-system/controller-6968d8fdc4-6j6zr" Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.095251 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6423026b-4d48-4bac-8a25-3c8fb0d89bbf-metrics-certs\") pod \"controller-6968d8fdc4-6j6zr\" (UID: \"6423026b-4d48-4bac-8a25-3c8fb0d89bbf\") " pod="metallb-system/controller-6968d8fdc4-6j6zr" Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.095281 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6423026b-4d48-4bac-8a25-3c8fb0d89bbf-cert\") pod \"controller-6968d8fdc4-6j6zr\" (UID: \"6423026b-4d48-4bac-8a25-3c8fb0d89bbf\") " pod="metallb-system/controller-6968d8fdc4-6j6zr" Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.095307 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/44f57db2-6ae9-42c4-bf1e-12a301657172-memberlist\") pod \"speaker-xsq75\" (UID: \"44f57db2-6ae9-42c4-bf1e-12a301657172\") " pod="metallb-system/speaker-xsq75" Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.095323 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7zp7\" (UniqueName: \"kubernetes.io/projected/44f57db2-6ae9-42c4-bf1e-12a301657172-kube-api-access-r7zp7\") pod \"speaker-xsq75\" (UID: \"44f57db2-6ae9-42c4-bf1e-12a301657172\") " pod="metallb-system/speaker-xsq75" Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.095361 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/44f57db2-6ae9-42c4-bf1e-12a301657172-metallb-excludel2\") pod \"speaker-xsq75\" (UID: \"44f57db2-6ae9-42c4-bf1e-12a301657172\") " pod="metallb-system/speaker-xsq75" Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.095425 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" 
(UniqueName: \"kubernetes.io/secret/44f57db2-6ae9-42c4-bf1e-12a301657172-metrics-certs\") pod \"speaker-xsq75\" (UID: \"44f57db2-6ae9-42c4-bf1e-12a301657172\") " pod="metallb-system/speaker-xsq75" Jan 22 14:19:29 crc kubenswrapper[5017]: E0122 14:19:29.095524 5017 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Jan 22 14:19:29 crc kubenswrapper[5017]: E0122 14:19:29.095598 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/44f57db2-6ae9-42c4-bf1e-12a301657172-metrics-certs podName:44f57db2-6ae9-42c4-bf1e-12a301657172 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:29.595573121 +0000 UTC m=+984.588034979 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/44f57db2-6ae9-42c4-bf1e-12a301657172-metrics-certs") pod "speaker-xsq75" (UID: "44f57db2-6ae9-42c4-bf1e-12a301657172") : secret "speaker-certs-secret" not found Jan 22 14:19:29 crc kubenswrapper[5017]: E0122 14:19:29.095595 5017 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 22 14:19:29 crc kubenswrapper[5017]: E0122 14:19:29.095725 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/44f57db2-6ae9-42c4-bf1e-12a301657172-memberlist podName:44f57db2-6ae9-42c4-bf1e-12a301657172 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:29.595689424 +0000 UTC m=+984.588151472 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/44f57db2-6ae9-42c4-bf1e-12a301657172-memberlist") pod "speaker-xsq75" (UID: "44f57db2-6ae9-42c4-bf1e-12a301657172") : secret "metallb-memberlist" not found Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.096694 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/44f57db2-6ae9-42c4-bf1e-12a301657172-metallb-excludel2\") pod \"speaker-xsq75\" (UID: \"44f57db2-6ae9-42c4-bf1e-12a301657172\") " pod="metallb-system/speaker-xsq75" Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.098438 5017 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.100166 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6423026b-4d48-4bac-8a25-3c8fb0d89bbf-metrics-certs\") pod \"controller-6968d8fdc4-6j6zr\" (UID: \"6423026b-4d48-4bac-8a25-3c8fb0d89bbf\") " pod="metallb-system/controller-6968d8fdc4-6j6zr" Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.110806 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6423026b-4d48-4bac-8a25-3c8fb0d89bbf-cert\") pod \"controller-6968d8fdc4-6j6zr\" (UID: \"6423026b-4d48-4bac-8a25-3c8fb0d89bbf\") " pod="metallb-system/controller-6968d8fdc4-6j6zr" Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.114847 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7zp7\" (UniqueName: \"kubernetes.io/projected/44f57db2-6ae9-42c4-bf1e-12a301657172-kube-api-access-r7zp7\") pod \"speaker-xsq75\" (UID: \"44f57db2-6ae9-42c4-bf1e-12a301657172\") " pod="metallb-system/speaker-xsq75" Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.119779 5017 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-w95xz\" (UniqueName: \"kubernetes.io/projected/6423026b-4d48-4bac-8a25-3c8fb0d89bbf-kube-api-access-w95xz\") pod \"controller-6968d8fdc4-6j6zr\" (UID: \"6423026b-4d48-4bac-8a25-3c8fb0d89bbf\") " pod="metallb-system/controller-6968d8fdc4-6j6zr" Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.137364 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.257621 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6968d8fdc4-6j6zr" Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.502156 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d9d4ff3a-c59f-4b96-a00e-832089e6b4f9-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-tc22d\" (UID: \"d9d4ff3a-c59f-4b96-a00e-832089e6b4f9\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-tc22d" Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.510171 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d9d4ff3a-c59f-4b96-a00e-832089e6b4f9-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-tc22d\" (UID: \"d9d4ff3a-c59f-4b96-a00e-832089e6b4f9\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-tc22d" Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.604052 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/44f57db2-6ae9-42c4-bf1e-12a301657172-metrics-certs\") pod \"speaker-xsq75\" (UID: \"44f57db2-6ae9-42c4-bf1e-12a301657172\") " pod="metallb-system/speaker-xsq75" Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.604201 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/44f57db2-6ae9-42c4-bf1e-12a301657172-memberlist\") pod \"speaker-xsq75\" (UID: \"44f57db2-6ae9-42c4-bf1e-12a301657172\") " pod="metallb-system/speaker-xsq75" Jan 22 14:19:29 crc kubenswrapper[5017]: E0122 14:19:29.604438 5017 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 22 14:19:29 crc kubenswrapper[5017]: E0122 14:19:29.604515 5017 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/44f57db2-6ae9-42c4-bf1e-12a301657172-memberlist podName:44f57db2-6ae9-42c4-bf1e-12a301657172 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:30.604490749 +0000 UTC m=+985.596952607 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/44f57db2-6ae9-42c4-bf1e-12a301657172-memberlist") pod "speaker-xsq75" (UID: "44f57db2-6ae9-42c4-bf1e-12a301657172") : secret "metallb-memberlist" not found Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.608225 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p22kj" event={"ID":"6b94cba9-db08-403c-acd4-892f04863fa2","Type":"ContainerStarted","Data":"f9ec1d152c2d98394215e874ef1d6c7abb4f01249f3fe0c6220c92d2782ab81e"} Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.614145 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/44f57db2-6ae9-42c4-bf1e-12a301657172-metrics-certs\") pod \"speaker-xsq75\" (UID: \"44f57db2-6ae9-42c4-bf1e-12a301657172\") " pod="metallb-system/speaker-xsq75" Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.718293 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-6j6zr"] Jan 22 14:19:29 crc kubenswrapper[5017]: I0122 14:19:29.766096 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-tc22d" Jan 22 14:19:30 crc kubenswrapper[5017]: I0122 14:19:30.033832 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-tc22d"] Jan 22 14:19:30 crc kubenswrapper[5017]: I0122 14:19:30.617218 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-tc22d" event={"ID":"d9d4ff3a-c59f-4b96-a00e-832089e6b4f9","Type":"ContainerStarted","Data":"0952b5b5d39378ef521a6752a568f3c58fc0f8485a974bbb59bd5a9e9247a4fd"} Jan 22 14:19:30 crc kubenswrapper[5017]: I0122 14:19:30.621287 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-6j6zr" event={"ID":"6423026b-4d48-4bac-8a25-3c8fb0d89bbf","Type":"ContainerStarted","Data":"4af00aa3ebf06766dba3653dbcf58e4838e6b5bd9b2525f938e257b53c46550a"} Jan 22 14:19:30 crc kubenswrapper[5017]: I0122 14:19:30.621345 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-6j6zr" event={"ID":"6423026b-4d48-4bac-8a25-3c8fb0d89bbf","Type":"ContainerStarted","Data":"e8dbd1364e03ecea3ba70f901dcfcebd4be3306f55a28e9c16fce944532595b8"} Jan 22 14:19:30 crc kubenswrapper[5017]: I0122 14:19:30.621356 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-6j6zr" event={"ID":"6423026b-4d48-4bac-8a25-3c8fb0d89bbf","Type":"ContainerStarted","Data":"196ae2f28fb9430573c1e0bd08d05d9427686b8131484b6cde15071d0a8df3dd"} Jan 22 14:19:30 crc kubenswrapper[5017]: I0122 14:19:30.621525 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6968d8fdc4-6j6zr" Jan 22 14:19:30 crc kubenswrapper[5017]: I0122 14:19:30.625500 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/44f57db2-6ae9-42c4-bf1e-12a301657172-memberlist\") pod \"speaker-xsq75\" (UID: \"44f57db2-6ae9-42c4-bf1e-12a301657172\") " pod="metallb-system/speaker-xsq75" Jan 22 14:19:30 crc kubenswrapper[5017]: I0122 14:19:30.634646 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/44f57db2-6ae9-42c4-bf1e-12a301657172-memberlist\") pod \"speaker-xsq75\" (UID: 
\"44f57db2-6ae9-42c4-bf1e-12a301657172\") " pod="metallb-system/speaker-xsq75" Jan 22 14:19:30 crc kubenswrapper[5017]: I0122 14:19:30.653335 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6968d8fdc4-6j6zr" podStartSLOduration=2.653301468 podStartE2EDuration="2.653301468s" podCreationTimestamp="2026-01-22 14:19:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:19:30.644724764 +0000 UTC m=+985.637186642" watchObservedRunningTime="2026-01-22 14:19:30.653301468 +0000 UTC m=+985.645763326" Jan 22 14:19:30 crc kubenswrapper[5017]: I0122 14:19:30.743009 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-xsq75" Jan 22 14:19:31 crc kubenswrapper[5017]: I0122 14:19:31.636521 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-xsq75" event={"ID":"44f57db2-6ae9-42c4-bf1e-12a301657172","Type":"ContainerStarted","Data":"e62d8b3ccaa4b9ce49768ee7c40892dc38eeea3f875f7d980cbb3b307e8a372d"} Jan 22 14:19:31 crc kubenswrapper[5017]: I0122 14:19:31.636896 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-xsq75" event={"ID":"44f57db2-6ae9-42c4-bf1e-12a301657172","Type":"ContainerStarted","Data":"afd853c5dd9ef22f39b3f2e15c90fe3bed82ac8b72bbd0e71b79242460be6f2d"} Jan 22 14:19:32 crc kubenswrapper[5017]: I0122 14:19:32.650144 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-xsq75" event={"ID":"44f57db2-6ae9-42c4-bf1e-12a301657172","Type":"ContainerStarted","Data":"87e262879f893e781685a696bf1b9ba7cb57c3b76a6194f8b40b1e1fcf1b38de"} Jan 22 14:19:32 crc kubenswrapper[5017]: I0122 14:19:32.650432 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-xsq75" Jan 22 14:19:32 crc kubenswrapper[5017]: I0122 14:19:32.677973 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-xsq75" podStartSLOduration=4.6779459469999995 podStartE2EDuration="4.677945947s" podCreationTimestamp="2026-01-22 14:19:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:19:32.67137494 +0000 UTC m=+987.663836798" watchObservedRunningTime="2026-01-22 14:19:32.677945947 +0000 UTC m=+987.670407805" Jan 22 14:19:39 crc kubenswrapper[5017]: I0122 14:19:39.266369 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6968d8fdc4-6j6zr" Jan 22 14:19:39 crc kubenswrapper[5017]: I0122 14:19:39.806693 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-tc22d" event={"ID":"d9d4ff3a-c59f-4b96-a00e-832089e6b4f9","Type":"ContainerStarted","Data":"cd4961f48a1e1a0f8754f5a50bc67fc8032d88afa81b0fb7b2085eb52646eb45"} Jan 22 14:19:39 crc kubenswrapper[5017]: I0122 14:19:39.806905 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-tc22d" Jan 22 14:19:39 crc kubenswrapper[5017]: I0122 14:19:39.809042 5017 generic.go:334] "Generic (PLEG): container finished" podID="6b94cba9-db08-403c-acd4-892f04863fa2" containerID="5cd83ded97018ffb92d41ad064033b697e2c471f0188508d2805b6fba4f33fb0" exitCode=0 Jan 22 14:19:39 crc kubenswrapper[5017]: I0122 14:19:39.809157 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/frr-k8s-p22kj" event={"ID":"6b94cba9-db08-403c-acd4-892f04863fa2","Type":"ContainerDied","Data":"5cd83ded97018ffb92d41ad064033b697e2c471f0188508d2805b6fba4f33fb0"} Jan 22 14:19:39 crc kubenswrapper[5017]: I0122 14:19:39.830035 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-tc22d" podStartSLOduration=2.869925169 podStartE2EDuration="11.830005958s" podCreationTimestamp="2026-01-22 14:19:28 +0000 UTC" firstStartedPulling="2026-01-22 14:19:30.055413545 +0000 UTC m=+985.047875393" lastFinishedPulling="2026-01-22 14:19:39.015494324 +0000 UTC m=+994.007956182" observedRunningTime="2026-01-22 14:19:39.827001263 +0000 UTC m=+994.819463121" watchObservedRunningTime="2026-01-22 14:19:39.830005958 +0000 UTC m=+994.822467836" Jan 22 14:19:40 crc kubenswrapper[5017]: I0122 14:19:40.821216 5017 generic.go:334] "Generic (PLEG): container finished" podID="6b94cba9-db08-403c-acd4-892f04863fa2" containerID="f6deb34582fb0d7e7dea338493e66331674203f5b441ff6e8370a35c5a4d342d" exitCode=0 Jan 22 14:19:40 crc kubenswrapper[5017]: I0122 14:19:40.821297 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p22kj" event={"ID":"6b94cba9-db08-403c-acd4-892f04863fa2","Type":"ContainerDied","Data":"f6deb34582fb0d7e7dea338493e66331674203f5b441ff6e8370a35c5a4d342d"} Jan 22 14:19:41 crc kubenswrapper[5017]: I0122 14:19:41.836491 5017 generic.go:334] "Generic (PLEG): container finished" podID="6b94cba9-db08-403c-acd4-892f04863fa2" containerID="ed49745664019ddf5faa47c6123fa22c7fc24c96780910033859add37da71b48" exitCode=0 Jan 22 14:19:41 crc kubenswrapper[5017]: I0122 14:19:41.836582 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p22kj" event={"ID":"6b94cba9-db08-403c-acd4-892f04863fa2","Type":"ContainerDied","Data":"ed49745664019ddf5faa47c6123fa22c7fc24c96780910033859add37da71b48"} Jan 22 14:19:42 crc kubenswrapper[5017]: I0122 14:19:42.855545 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p22kj" event={"ID":"6b94cba9-db08-403c-acd4-892f04863fa2","Type":"ContainerStarted","Data":"bcd8e6f1adb4898dd16b8cb0d4f0915515e19f0142984b97d982483e3cc984da"} Jan 22 14:19:42 crc kubenswrapper[5017]: I0122 14:19:42.855961 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p22kj" event={"ID":"6b94cba9-db08-403c-acd4-892f04863fa2","Type":"ContainerStarted","Data":"4c955358dd14fafcc56c57e0b97e848304dd5208a9384f57428af319abc53d2a"} Jan 22 14:19:42 crc kubenswrapper[5017]: I0122 14:19:42.855974 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p22kj" event={"ID":"6b94cba9-db08-403c-acd4-892f04863fa2","Type":"ContainerStarted","Data":"2de02e2fdf3d2dfaec97ecebdc9137e7f5f72e1513bbe5c07f487e0243c83fa6"} Jan 22 14:19:42 crc kubenswrapper[5017]: I0122 14:19:42.855984 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p22kj" event={"ID":"6b94cba9-db08-403c-acd4-892f04863fa2","Type":"ContainerStarted","Data":"cd9f8a9475a6d5a83fefdf6b079469895f8e0c8bc2a7c7b771f97459b72f3ba0"} Jan 22 14:19:42 crc kubenswrapper[5017]: I0122 14:19:42.855993 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p22kj" event={"ID":"6b94cba9-db08-403c-acd4-892f04863fa2","Type":"ContainerStarted","Data":"5cd5750c4c87dd00b260f7d611636afeda976e2c8b7c43d6b2802b163d207804"} Jan 22 14:19:43 crc kubenswrapper[5017]: I0122 14:19:43.874833 5017 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p22kj" event={"ID":"6b94cba9-db08-403c-acd4-892f04863fa2","Type":"ContainerStarted","Data":"6bf4921dcaaef0f412445f636b9c34cf1a9c574b220441309c85bc537606147d"} Jan 22 14:19:43 crc kubenswrapper[5017]: I0122 14:19:43.876000 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:44 crc kubenswrapper[5017]: I0122 14:19:44.138443 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:44 crc kubenswrapper[5017]: I0122 14:19:44.211796 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-p22kj" Jan 22 14:19:44 crc kubenswrapper[5017]: I0122 14:19:44.247925 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-p22kj" podStartSLOduration=6.536038391 podStartE2EDuration="16.247892127s" podCreationTimestamp="2026-01-22 14:19:28 +0000 UTC" firstStartedPulling="2026-01-22 14:19:29.283903526 +0000 UTC m=+984.276365384" lastFinishedPulling="2026-01-22 14:19:38.995757262 +0000 UTC m=+993.988219120" observedRunningTime="2026-01-22 14:19:43.904379361 +0000 UTC m=+998.896841229" watchObservedRunningTime="2026-01-22 14:19:44.247892127 +0000 UTC m=+999.240354005" Jan 22 14:19:49 crc kubenswrapper[5017]: I0122 14:19:49.772799 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-tc22d" Jan 22 14:19:50 crc kubenswrapper[5017]: I0122 14:19:50.748553 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-xsq75" Jan 22 14:19:52 crc kubenswrapper[5017]: I0122 14:19:52.884194 5017 patch_prober.go:28] interesting pod/machine-config-daemon-cr62h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:19:52 crc kubenswrapper[5017]: I0122 14:19:52.884304 5017 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:19:53 crc kubenswrapper[5017]: I0122 14:19:53.728128 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-h5jm9"] Jan 22 14:19:53 crc kubenswrapper[5017]: I0122 14:19:53.729606 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-h5jm9" Jan 22 14:19:53 crc kubenswrapper[5017]: I0122 14:19:53.732454 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-4r47c" Jan 22 14:19:53 crc kubenswrapper[5017]: I0122 14:19:53.732538 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Jan 22 14:19:53 crc kubenswrapper[5017]: I0122 14:19:53.732453 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Jan 22 14:19:53 crc kubenswrapper[5017]: I0122 14:19:53.745947 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-h5jm9"] Jan 22 14:19:53 crc kubenswrapper[5017]: I0122 14:19:53.851509 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dq877\" (UniqueName: \"kubernetes.io/projected/d63d759a-1b1a-413c-9f83-3c2cefc5f40c-kube-api-access-dq877\") pod \"openstack-operator-index-h5jm9\" (UID: \"d63d759a-1b1a-413c-9f83-3c2cefc5f40c\") " pod="openstack-operators/openstack-operator-index-h5jm9" Jan 22 14:19:53 crc kubenswrapper[5017]: I0122 14:19:53.953065 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dq877\" (UniqueName: \"kubernetes.io/projected/d63d759a-1b1a-413c-9f83-3c2cefc5f40c-kube-api-access-dq877\") pod \"openstack-operator-index-h5jm9\" (UID: \"d63d759a-1b1a-413c-9f83-3c2cefc5f40c\") " pod="openstack-operators/openstack-operator-index-h5jm9" Jan 22 14:19:53 crc kubenswrapper[5017]: I0122 14:19:53.977232 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dq877\" (UniqueName: \"kubernetes.io/projected/d63d759a-1b1a-413c-9f83-3c2cefc5f40c-kube-api-access-dq877\") pod \"openstack-operator-index-h5jm9\" (UID: \"d63d759a-1b1a-413c-9f83-3c2cefc5f40c\") " pod="openstack-operators/openstack-operator-index-h5jm9" Jan 22 14:19:54 crc kubenswrapper[5017]: I0122 14:19:54.056411 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-h5jm9" Jan 22 14:19:54 crc kubenswrapper[5017]: I0122 14:19:54.503249 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-h5jm9"] Jan 22 14:19:54 crc kubenswrapper[5017]: I0122 14:19:54.517027 5017 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 14:19:54 crc kubenswrapper[5017]: I0122 14:19:54.967022 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-h5jm9" event={"ID":"d63d759a-1b1a-413c-9f83-3c2cefc5f40c","Type":"ContainerStarted","Data":"aa0bfe0524fad30bb7a8b666d73c5f974a92227485eda5039e160a82241ab24d"} Jan 22 14:19:57 crc kubenswrapper[5017]: I0122 14:19:57.102573 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-h5jm9"] Jan 22 14:19:57 crc kubenswrapper[5017]: I0122 14:19:57.707530 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-ndprg"] Jan 22 14:19:57 crc kubenswrapper[5017]: I0122 14:19:57.709572 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-ndprg" Jan 22 14:19:57 crc kubenswrapper[5017]: I0122 14:19:57.718513 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ch4f\" (UniqueName: \"kubernetes.io/projected/11c90858-0991-400a-b9d1-ae05483386bf-kube-api-access-7ch4f\") pod \"openstack-operator-index-ndprg\" (UID: \"11c90858-0991-400a-b9d1-ae05483386bf\") " pod="openstack-operators/openstack-operator-index-ndprg" Jan 22 14:19:57 crc kubenswrapper[5017]: I0122 14:19:57.719160 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-ndprg"] Jan 22 14:19:57 crc kubenswrapper[5017]: I0122 14:19:57.821034 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ch4f\" (UniqueName: \"kubernetes.io/projected/11c90858-0991-400a-b9d1-ae05483386bf-kube-api-access-7ch4f\") pod \"openstack-operator-index-ndprg\" (UID: \"11c90858-0991-400a-b9d1-ae05483386bf\") " pod="openstack-operators/openstack-operator-index-ndprg" Jan 22 14:19:57 crc kubenswrapper[5017]: I0122 14:19:57.849815 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ch4f\" (UniqueName: \"kubernetes.io/projected/11c90858-0991-400a-b9d1-ae05483386bf-kube-api-access-7ch4f\") pod \"openstack-operator-index-ndprg\" (UID: \"11c90858-0991-400a-b9d1-ae05483386bf\") " pod="openstack-operators/openstack-operator-index-ndprg" Jan 22 14:19:58 crc kubenswrapper[5017]: I0122 14:19:58.037327 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-ndprg" Jan 22 14:19:58 crc kubenswrapper[5017]: I0122 14:19:58.309398 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-ndprg"] Jan 22 14:19:58 crc kubenswrapper[5017]: W0122 14:19:58.312159 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod11c90858_0991_400a_b9d1_ae05483386bf.slice/crio-fdbb0e8b9099b98416337780b8a78e33bf5d4be2ca739ee88ab186c08eef2de0 WatchSource:0}: Error finding container fdbb0e8b9099b98416337780b8a78e33bf5d4be2ca739ee88ab186c08eef2de0: Status 404 returned error can't find the container with id fdbb0e8b9099b98416337780b8a78e33bf5d4be2ca739ee88ab186c08eef2de0 Jan 22 14:19:59 crc kubenswrapper[5017]: I0122 14:19:59.001632 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-ndprg" event={"ID":"11c90858-0991-400a-b9d1-ae05483386bf","Type":"ContainerStarted","Data":"fdbb0e8b9099b98416337780b8a78e33bf5d4be2ca739ee88ab186c08eef2de0"} Jan 22 14:19:59 crc kubenswrapper[5017]: I0122 14:19:59.144223 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-p22kj" Jan 22 14:20:04 crc kubenswrapper[5017]: I0122 14:20:04.042794 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-ndprg" event={"ID":"11c90858-0991-400a-b9d1-ae05483386bf","Type":"ContainerStarted","Data":"5b8863748b6f5e873152b4fd23d953b843f140ed5d6f851db04fb8afc296fef1"} Jan 22 14:20:04 crc kubenswrapper[5017]: I0122 14:20:04.045054 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-h5jm9" 
event={"ID":"d63d759a-1b1a-413c-9f83-3c2cefc5f40c","Type":"ContainerStarted","Data":"0e538af7d93f1aaf5313757b45b83a2ef09816ddc693ea9ee55b21e3286478b7"} Jan 22 14:20:04 crc kubenswrapper[5017]: I0122 14:20:04.045382 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-h5jm9" podUID="d63d759a-1b1a-413c-9f83-3c2cefc5f40c" containerName="registry-server" containerID="cri-o://0e538af7d93f1aaf5313757b45b83a2ef09816ddc693ea9ee55b21e3286478b7" gracePeriod=2 Jan 22 14:20:04 crc kubenswrapper[5017]: I0122 14:20:04.058324 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-h5jm9" Jan 22 14:20:04 crc kubenswrapper[5017]: I0122 14:20:04.076051 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-ndprg" podStartSLOduration=2.2319708 podStartE2EDuration="7.07602415s" podCreationTimestamp="2026-01-22 14:19:57 +0000 UTC" firstStartedPulling="2026-01-22 14:19:58.318514047 +0000 UTC m=+1013.310975905" lastFinishedPulling="2026-01-22 14:20:03.162567367 +0000 UTC m=+1018.155029255" observedRunningTime="2026-01-22 14:20:04.066951741 +0000 UTC m=+1019.059413649" watchObservedRunningTime="2026-01-22 14:20:04.07602415 +0000 UTC m=+1019.068486018" Jan 22 14:20:04 crc kubenswrapper[5017]: I0122 14:20:04.102002 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-h5jm9" podStartSLOduration=2.452768506 podStartE2EDuration="11.101951128s" podCreationTimestamp="2026-01-22 14:19:53 +0000 UTC" firstStartedPulling="2026-01-22 14:19:54.516468383 +0000 UTC m=+1009.508930281" lastFinishedPulling="2026-01-22 14:20:03.165651035 +0000 UTC m=+1018.158112903" observedRunningTime="2026-01-22 14:20:04.101052993 +0000 UTC m=+1019.093514841" watchObservedRunningTime="2026-01-22 14:20:04.101951128 +0000 UTC m=+1019.094412986" Jan 22 14:20:04 crc kubenswrapper[5017]: I0122 14:20:04.449821 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-h5jm9" Jan 22 14:20:04 crc kubenswrapper[5017]: I0122 14:20:04.642610 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dq877\" (UniqueName: \"kubernetes.io/projected/d63d759a-1b1a-413c-9f83-3c2cefc5f40c-kube-api-access-dq877\") pod \"d63d759a-1b1a-413c-9f83-3c2cefc5f40c\" (UID: \"d63d759a-1b1a-413c-9f83-3c2cefc5f40c\") " Jan 22 14:20:04 crc kubenswrapper[5017]: I0122 14:20:04.653965 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d63d759a-1b1a-413c-9f83-3c2cefc5f40c-kube-api-access-dq877" (OuterVolumeSpecName: "kube-api-access-dq877") pod "d63d759a-1b1a-413c-9f83-3c2cefc5f40c" (UID: "d63d759a-1b1a-413c-9f83-3c2cefc5f40c"). InnerVolumeSpecName "kube-api-access-dq877". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:20:04 crc kubenswrapper[5017]: I0122 14:20:04.745253 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dq877\" (UniqueName: \"kubernetes.io/projected/d63d759a-1b1a-413c-9f83-3c2cefc5f40c-kube-api-access-dq877\") on node \"crc\" DevicePath \"\"" Jan 22 14:20:05 crc kubenswrapper[5017]: I0122 14:20:05.088569 5017 generic.go:334] "Generic (PLEG): container finished" podID="d63d759a-1b1a-413c-9f83-3c2cefc5f40c" containerID="0e538af7d93f1aaf5313757b45b83a2ef09816ddc693ea9ee55b21e3286478b7" exitCode=0 Jan 22 14:20:05 crc kubenswrapper[5017]: I0122 14:20:05.088666 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-h5jm9" Jan 22 14:20:05 crc kubenswrapper[5017]: I0122 14:20:05.088713 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-h5jm9" event={"ID":"d63d759a-1b1a-413c-9f83-3c2cefc5f40c","Type":"ContainerDied","Data":"0e538af7d93f1aaf5313757b45b83a2ef09816ddc693ea9ee55b21e3286478b7"} Jan 22 14:20:05 crc kubenswrapper[5017]: I0122 14:20:05.088800 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-h5jm9" event={"ID":"d63d759a-1b1a-413c-9f83-3c2cefc5f40c","Type":"ContainerDied","Data":"aa0bfe0524fad30bb7a8b666d73c5f974a92227485eda5039e160a82241ab24d"} Jan 22 14:20:05 crc kubenswrapper[5017]: I0122 14:20:05.088830 5017 scope.go:117] "RemoveContainer" containerID="0e538af7d93f1aaf5313757b45b83a2ef09816ddc693ea9ee55b21e3286478b7" Jan 22 14:20:05 crc kubenswrapper[5017]: I0122 14:20:05.114780 5017 scope.go:117] "RemoveContainer" containerID="0e538af7d93f1aaf5313757b45b83a2ef09816ddc693ea9ee55b21e3286478b7" Jan 22 14:20:05 crc kubenswrapper[5017]: E0122 14:20:05.115932 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e538af7d93f1aaf5313757b45b83a2ef09816ddc693ea9ee55b21e3286478b7\": container with ID starting with 0e538af7d93f1aaf5313757b45b83a2ef09816ddc693ea9ee55b21e3286478b7 not found: ID does not exist" containerID="0e538af7d93f1aaf5313757b45b83a2ef09816ddc693ea9ee55b21e3286478b7" Jan 22 14:20:05 crc kubenswrapper[5017]: I0122 14:20:05.116032 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e538af7d93f1aaf5313757b45b83a2ef09816ddc693ea9ee55b21e3286478b7"} err="failed to get container status \"0e538af7d93f1aaf5313757b45b83a2ef09816ddc693ea9ee55b21e3286478b7\": rpc error: code = NotFound desc = could not find container \"0e538af7d93f1aaf5313757b45b83a2ef09816ddc693ea9ee55b21e3286478b7\": container with ID starting with 0e538af7d93f1aaf5313757b45b83a2ef09816ddc693ea9ee55b21e3286478b7 not found: ID does not exist" Jan 22 14:20:05 crc kubenswrapper[5017]: I0122 14:20:05.132974 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-h5jm9"] Jan 22 14:20:05 crc kubenswrapper[5017]: I0122 14:20:05.139201 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-h5jm9"] Jan 22 14:20:05 crc kubenswrapper[5017]: I0122 14:20:05.275684 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d63d759a-1b1a-413c-9f83-3c2cefc5f40c" path="/var/lib/kubelet/pods/d63d759a-1b1a-413c-9f83-3c2cefc5f40c/volumes" Jan 22 14:20:08 crc kubenswrapper[5017]: I0122 14:20:08.038090 5017 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-ndprg" Jan 22 14:20:08 crc kubenswrapper[5017]: I0122 14:20:08.038578 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-ndprg" Jan 22 14:20:08 crc kubenswrapper[5017]: I0122 14:20:08.075826 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-ndprg" Jan 22 14:20:08 crc kubenswrapper[5017]: I0122 14:20:08.141264 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-ndprg" Jan 22 14:20:09 crc kubenswrapper[5017]: I0122 14:20:09.155729 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p"] Jan 22 14:20:09 crc kubenswrapper[5017]: E0122 14:20:09.156768 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d63d759a-1b1a-413c-9f83-3c2cefc5f40c" containerName="registry-server" Jan 22 14:20:09 crc kubenswrapper[5017]: I0122 14:20:09.156792 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="d63d759a-1b1a-413c-9f83-3c2cefc5f40c" containerName="registry-server" Jan 22 14:20:09 crc kubenswrapper[5017]: I0122 14:20:09.157047 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="d63d759a-1b1a-413c-9f83-3c2cefc5f40c" containerName="registry-server" Jan 22 14:20:09 crc kubenswrapper[5017]: I0122 14:20:09.158676 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p" Jan 22 14:20:09 crc kubenswrapper[5017]: I0122 14:20:09.161272 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-g97fk" Jan 22 14:20:09 crc kubenswrapper[5017]: I0122 14:20:09.165856 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p"] Jan 22 14:20:09 crc kubenswrapper[5017]: I0122 14:20:09.228361 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/359429e7-916c-449a-a8da-d9d2ffd88660-bundle\") pod \"7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p\" (UID: \"359429e7-916c-449a-a8da-d9d2ffd88660\") " pod="openstack-operators/7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p" Jan 22 14:20:09 crc kubenswrapper[5017]: I0122 14:20:09.228592 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/359429e7-916c-449a-a8da-d9d2ffd88660-util\") pod \"7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p\" (UID: \"359429e7-916c-449a-a8da-d9d2ffd88660\") " pod="openstack-operators/7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p" Jan 22 14:20:09 crc kubenswrapper[5017]: I0122 14:20:09.228943 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kstwd\" (UniqueName: \"kubernetes.io/projected/359429e7-916c-449a-a8da-d9d2ffd88660-kube-api-access-kstwd\") pod \"7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p\" (UID: \"359429e7-916c-449a-a8da-d9d2ffd88660\") " pod="openstack-operators/7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p" 
Jan 22 14:20:09 crc kubenswrapper[5017]: I0122 14:20:09.330536 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/359429e7-916c-449a-a8da-d9d2ffd88660-util\") pod \"7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p\" (UID: \"359429e7-916c-449a-a8da-d9d2ffd88660\") " pod="openstack-operators/7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p" Jan 22 14:20:09 crc kubenswrapper[5017]: I0122 14:20:09.330632 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kstwd\" (UniqueName: \"kubernetes.io/projected/359429e7-916c-449a-a8da-d9d2ffd88660-kube-api-access-kstwd\") pod \"7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p\" (UID: \"359429e7-916c-449a-a8da-d9d2ffd88660\") " pod="openstack-operators/7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p" Jan 22 14:20:09 crc kubenswrapper[5017]: I0122 14:20:09.330678 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/359429e7-916c-449a-a8da-d9d2ffd88660-bundle\") pod \"7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p\" (UID: \"359429e7-916c-449a-a8da-d9d2ffd88660\") " pod="openstack-operators/7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p" Jan 22 14:20:09 crc kubenswrapper[5017]: I0122 14:20:09.331332 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/359429e7-916c-449a-a8da-d9d2ffd88660-util\") pod \"7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p\" (UID: \"359429e7-916c-449a-a8da-d9d2ffd88660\") " pod="openstack-operators/7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p" Jan 22 14:20:09 crc kubenswrapper[5017]: I0122 14:20:09.331385 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/359429e7-916c-449a-a8da-d9d2ffd88660-bundle\") pod \"7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p\" (UID: \"359429e7-916c-449a-a8da-d9d2ffd88660\") " pod="openstack-operators/7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p" Jan 22 14:20:09 crc kubenswrapper[5017]: I0122 14:20:09.354035 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kstwd\" (UniqueName: \"kubernetes.io/projected/359429e7-916c-449a-a8da-d9d2ffd88660-kube-api-access-kstwd\") pod \"7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p\" (UID: \"359429e7-916c-449a-a8da-d9d2ffd88660\") " pod="openstack-operators/7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p" Jan 22 14:20:09 crc kubenswrapper[5017]: I0122 14:20:09.480835 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p" Jan 22 14:20:09 crc kubenswrapper[5017]: I0122 14:20:09.952175 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p"] Jan 22 14:20:09 crc kubenswrapper[5017]: W0122 14:20:09.961062 5017 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod359429e7_916c_449a_a8da_d9d2ffd88660.slice/crio-68a4a39bc91b11f11ab31fe9f4f09b2750ddd36baff1a0c5c2fea4dc2aeceb63 WatchSource:0}: Error finding container 68a4a39bc91b11f11ab31fe9f4f09b2750ddd36baff1a0c5c2fea4dc2aeceb63: Status 404 returned error can't find the container with id 68a4a39bc91b11f11ab31fe9f4f09b2750ddd36baff1a0c5c2fea4dc2aeceb63 Jan 22 14:20:10 crc kubenswrapper[5017]: I0122 14:20:10.126638 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p" event={"ID":"359429e7-916c-449a-a8da-d9d2ffd88660","Type":"ContainerStarted","Data":"68a4a39bc91b11f11ab31fe9f4f09b2750ddd36baff1a0c5c2fea4dc2aeceb63"} Jan 22 14:20:11 crc kubenswrapper[5017]: I0122 14:20:11.140299 5017 generic.go:334] "Generic (PLEG): container finished" podID="359429e7-916c-449a-a8da-d9d2ffd88660" containerID="3e78aaf482f70d0b74b41b01e8f1f066679257c94bb6b5a8466a47d991a539cf" exitCode=0 Jan 22 14:20:11 crc kubenswrapper[5017]: I0122 14:20:11.140478 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p" event={"ID":"359429e7-916c-449a-a8da-d9d2ffd88660","Type":"ContainerDied","Data":"3e78aaf482f70d0b74b41b01e8f1f066679257c94bb6b5a8466a47d991a539cf"} Jan 22 14:20:12 crc kubenswrapper[5017]: I0122 14:20:12.152095 5017 generic.go:334] "Generic (PLEG): container finished" podID="359429e7-916c-449a-a8da-d9d2ffd88660" containerID="adf86144eaf4d2e3a01ae7baeaa7899bb38c6201729750aeb33bac8241af017c" exitCode=0 Jan 22 14:20:12 crc kubenswrapper[5017]: I0122 14:20:12.152181 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p" event={"ID":"359429e7-916c-449a-a8da-d9d2ffd88660","Type":"ContainerDied","Data":"adf86144eaf4d2e3a01ae7baeaa7899bb38c6201729750aeb33bac8241af017c"} Jan 22 14:20:13 crc kubenswrapper[5017]: I0122 14:20:13.175058 5017 generic.go:334] "Generic (PLEG): container finished" podID="359429e7-916c-449a-a8da-d9d2ffd88660" containerID="6275ba91ffde3e275341cd6ba705bd1e64109da206d429682c20cfe823d17fc9" exitCode=0 Jan 22 14:20:13 crc kubenswrapper[5017]: I0122 14:20:13.175165 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p" event={"ID":"359429e7-916c-449a-a8da-d9d2ffd88660","Type":"ContainerDied","Data":"6275ba91ffde3e275341cd6ba705bd1e64109da206d429682c20cfe823d17fc9"} Jan 22 14:20:14 crc kubenswrapper[5017]: I0122 14:20:14.455034 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p" Jan 22 14:20:14 crc kubenswrapper[5017]: I0122 14:20:14.615575 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kstwd\" (UniqueName: \"kubernetes.io/projected/359429e7-916c-449a-a8da-d9d2ffd88660-kube-api-access-kstwd\") pod \"359429e7-916c-449a-a8da-d9d2ffd88660\" (UID: \"359429e7-916c-449a-a8da-d9d2ffd88660\") " Jan 22 14:20:14 crc kubenswrapper[5017]: I0122 14:20:14.615989 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/359429e7-916c-449a-a8da-d9d2ffd88660-bundle\") pod \"359429e7-916c-449a-a8da-d9d2ffd88660\" (UID: \"359429e7-916c-449a-a8da-d9d2ffd88660\") " Jan 22 14:20:14 crc kubenswrapper[5017]: I0122 14:20:14.618701 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/359429e7-916c-449a-a8da-d9d2ffd88660-util\") pod \"359429e7-916c-449a-a8da-d9d2ffd88660\" (UID: \"359429e7-916c-449a-a8da-d9d2ffd88660\") " Jan 22 14:20:14 crc kubenswrapper[5017]: I0122 14:20:14.618575 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/359429e7-916c-449a-a8da-d9d2ffd88660-bundle" (OuterVolumeSpecName: "bundle") pod "359429e7-916c-449a-a8da-d9d2ffd88660" (UID: "359429e7-916c-449a-a8da-d9d2ffd88660"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:20:14 crc kubenswrapper[5017]: I0122 14:20:14.626645 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/359429e7-916c-449a-a8da-d9d2ffd88660-kube-api-access-kstwd" (OuterVolumeSpecName: "kube-api-access-kstwd") pod "359429e7-916c-449a-a8da-d9d2ffd88660" (UID: "359429e7-916c-449a-a8da-d9d2ffd88660"). InnerVolumeSpecName "kube-api-access-kstwd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:20:14 crc kubenswrapper[5017]: I0122 14:20:14.634357 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/359429e7-916c-449a-a8da-d9d2ffd88660-util" (OuterVolumeSpecName: "util") pod "359429e7-916c-449a-a8da-d9d2ffd88660" (UID: "359429e7-916c-449a-a8da-d9d2ffd88660"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:20:14 crc kubenswrapper[5017]: I0122 14:20:14.723407 5017 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/359429e7-916c-449a-a8da-d9d2ffd88660-util\") on node \"crc\" DevicePath \"\"" Jan 22 14:20:14 crc kubenswrapper[5017]: I0122 14:20:14.723456 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kstwd\" (UniqueName: \"kubernetes.io/projected/359429e7-916c-449a-a8da-d9d2ffd88660-kube-api-access-kstwd\") on node \"crc\" DevicePath \"\"" Jan 22 14:20:14 crc kubenswrapper[5017]: I0122 14:20:14.723468 5017 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/359429e7-916c-449a-a8da-d9d2ffd88660-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:20:15 crc kubenswrapper[5017]: I0122 14:20:15.192454 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p" event={"ID":"359429e7-916c-449a-a8da-d9d2ffd88660","Type":"ContainerDied","Data":"68a4a39bc91b11f11ab31fe9f4f09b2750ddd36baff1a0c5c2fea4dc2aeceb63"} Jan 22 14:20:15 crc kubenswrapper[5017]: I0122 14:20:15.192571 5017 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="68a4a39bc91b11f11ab31fe9f4f09b2750ddd36baff1a0c5c2fea4dc2aeceb63" Jan 22 14:20:15 crc kubenswrapper[5017]: I0122 14:20:15.192623 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p" Jan 22 14:20:21 crc kubenswrapper[5017]: I0122 14:20:21.765797 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-init-9ff957fff-jtx52"] Jan 22 14:20:21 crc kubenswrapper[5017]: E0122 14:20:21.766611 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="359429e7-916c-449a-a8da-d9d2ffd88660" containerName="extract" Jan 22 14:20:21 crc kubenswrapper[5017]: I0122 14:20:21.766629 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="359429e7-916c-449a-a8da-d9d2ffd88660" containerName="extract" Jan 22 14:20:21 crc kubenswrapper[5017]: E0122 14:20:21.766639 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="359429e7-916c-449a-a8da-d9d2ffd88660" containerName="util" Jan 22 14:20:21 crc kubenswrapper[5017]: I0122 14:20:21.766646 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="359429e7-916c-449a-a8da-d9d2ffd88660" containerName="util" Jan 22 14:20:21 crc kubenswrapper[5017]: E0122 14:20:21.766673 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="359429e7-916c-449a-a8da-d9d2ffd88660" containerName="pull" Jan 22 14:20:21 crc kubenswrapper[5017]: I0122 14:20:21.766679 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="359429e7-916c-449a-a8da-d9d2ffd88660" containerName="pull" Jan 22 14:20:21 crc kubenswrapper[5017]: I0122 14:20:21.766817 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="359429e7-916c-449a-a8da-d9d2ffd88660" containerName="extract" Jan 22 14:20:21 crc kubenswrapper[5017]: I0122 14:20:21.767425 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-9ff957fff-jtx52" Jan 22 14:20:21 crc kubenswrapper[5017]: I0122 14:20:21.771495 5017 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-init-dockercfg-k6db8" Jan 22 14:20:21 crc kubenswrapper[5017]: I0122 14:20:21.792784 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-9ff957fff-jtx52"] Jan 22 14:20:21 crc kubenswrapper[5017]: I0122 14:20:21.856602 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cs7dz\" (UniqueName: \"kubernetes.io/projected/5815f460-c9d9-4cde-b4fe-b89ebfa3a395-kube-api-access-cs7dz\") pod \"openstack-operator-controller-init-9ff957fff-jtx52\" (UID: \"5815f460-c9d9-4cde-b4fe-b89ebfa3a395\") " pod="openstack-operators/openstack-operator-controller-init-9ff957fff-jtx52" Jan 22 14:20:21 crc kubenswrapper[5017]: I0122 14:20:21.958314 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cs7dz\" (UniqueName: \"kubernetes.io/projected/5815f460-c9d9-4cde-b4fe-b89ebfa3a395-kube-api-access-cs7dz\") pod \"openstack-operator-controller-init-9ff957fff-jtx52\" (UID: \"5815f460-c9d9-4cde-b4fe-b89ebfa3a395\") " pod="openstack-operators/openstack-operator-controller-init-9ff957fff-jtx52" Jan 22 14:20:21 crc kubenswrapper[5017]: I0122 14:20:21.986625 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cs7dz\" (UniqueName: \"kubernetes.io/projected/5815f460-c9d9-4cde-b4fe-b89ebfa3a395-kube-api-access-cs7dz\") pod \"openstack-operator-controller-init-9ff957fff-jtx52\" (UID: \"5815f460-c9d9-4cde-b4fe-b89ebfa3a395\") " pod="openstack-operators/openstack-operator-controller-init-9ff957fff-jtx52" Jan 22 14:20:22 crc kubenswrapper[5017]: I0122 14:20:22.095442 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-9ff957fff-jtx52" Jan 22 14:20:22 crc kubenswrapper[5017]: I0122 14:20:22.345942 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-9ff957fff-jtx52"] Jan 22 14:20:22 crc kubenswrapper[5017]: I0122 14:20:22.887442 5017 patch_prober.go:28] interesting pod/machine-config-daemon-cr62h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:20:22 crc kubenswrapper[5017]: I0122 14:20:22.887842 5017 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:20:23 crc kubenswrapper[5017]: I0122 14:20:23.276666 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-9ff957fff-jtx52" event={"ID":"5815f460-c9d9-4cde-b4fe-b89ebfa3a395","Type":"ContainerStarted","Data":"9216956f950d79a219dd9a39d750cb30d72d3f0cd0ab9917a260f678e0306001"} Jan 22 14:20:28 crc kubenswrapper[5017]: I0122 14:20:28.310004 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-9ff957fff-jtx52" event={"ID":"5815f460-c9d9-4cde-b4fe-b89ebfa3a395","Type":"ContainerStarted","Data":"9ecd30efef2191710fa8128290882a79b8eb6bee50c05fbcd354ded345dcecb0"} Jan 22 14:20:28 crc kubenswrapper[5017]: I0122 14:20:28.310995 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-init-9ff957fff-jtx52" Jan 22 14:20:28 crc kubenswrapper[5017]: I0122 14:20:28.342536 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-init-9ff957fff-jtx52" podStartSLOduration=2.01024734 podStartE2EDuration="7.342502755s" podCreationTimestamp="2026-01-22 14:20:21 +0000 UTC" firstStartedPulling="2026-01-22 14:20:22.361019752 +0000 UTC m=+1037.353481610" lastFinishedPulling="2026-01-22 14:20:27.693275177 +0000 UTC m=+1042.685737025" observedRunningTime="2026-01-22 14:20:28.341339761 +0000 UTC m=+1043.333801639" watchObservedRunningTime="2026-01-22 14:20:28.342502755 +0000 UTC m=+1043.334964623" Jan 22 14:20:32 crc kubenswrapper[5017]: I0122 14:20:32.100433 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-init-9ff957fff-jtx52" Jan 22 14:20:52 crc kubenswrapper[5017]: I0122 14:20:52.883725 5017 patch_prober.go:28] interesting pod/machine-config-daemon-cr62h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:20:52 crc kubenswrapper[5017]: I0122 14:20:52.884829 5017 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" Jan 22 14:20:52 crc kubenswrapper[5017]: I0122 14:20:52.884907 5017 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" Jan 22 14:20:52 crc kubenswrapper[5017]: I0122 14:20:52.886065 5017 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1d27c3a5c58b1e6c68ef017f8d5adf66a05dff18c5c08e0c60bb0b5a99e32c2a"} pod="openshift-machine-config-operator/machine-config-daemon-cr62h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 14:20:52 crc kubenswrapper[5017]: I0122 14:20:52.886226 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" containerID="cri-o://1d27c3a5c58b1e6c68ef017f8d5adf66a05dff18c5c08e0c60bb0b5a99e32c2a" gracePeriod=600 Jan 22 14:20:53 crc kubenswrapper[5017]: I0122 14:20:53.552429 5017 generic.go:334] "Generic (PLEG): container finished" podID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerID="1d27c3a5c58b1e6c68ef017f8d5adf66a05dff18c5c08e0c60bb0b5a99e32c2a" exitCode=0 Jan 22 14:20:53 crc kubenswrapper[5017]: I0122 14:20:53.552487 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" event={"ID":"3f43f877-4d1f-4041-af1f-39a962f7ac0d","Type":"ContainerDied","Data":"1d27c3a5c58b1e6c68ef017f8d5adf66a05dff18c5c08e0c60bb0b5a99e32c2a"} Jan 22 14:20:53 crc kubenswrapper[5017]: I0122 14:20:53.553016 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" event={"ID":"3f43f877-4d1f-4041-af1f-39a962f7ac0d","Type":"ContainerStarted","Data":"782e38edfb21bed0b225c18b4748d2c32ff4bf0b56845bdb1db63cb7f0114015"} Jan 22 14:20:53 crc kubenswrapper[5017]: I0122 14:20:53.553263 5017 scope.go:117] "RemoveContainer" containerID="d5ba9ef9161473c8ce388bf5a9a68f3a95a5b876f5893ae82a825172bb9859d5" Jan 22 14:21:28 crc kubenswrapper[5017]: I0122 14:21:28.346711 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-x5wtw/must-gather-n52wm"] Jan 22 14:21:28 crc kubenswrapper[5017]: I0122 14:21:28.358465 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-x5wtw/must-gather-n52wm" Jan 22 14:21:28 crc kubenswrapper[5017]: I0122 14:21:28.369643 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-x5wtw"/"kube-root-ca.crt" Jan 22 14:21:28 crc kubenswrapper[5017]: I0122 14:21:28.369903 5017 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-x5wtw"/"openshift-service-ca.crt" Jan 22 14:21:28 crc kubenswrapper[5017]: I0122 14:21:28.380610 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-x5wtw/must-gather-n52wm"] Jan 22 14:21:28 crc kubenswrapper[5017]: I0122 14:21:28.467244 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpj6p\" (UniqueName: \"kubernetes.io/projected/0cf4f99f-28be-4590-a117-df6a6406f6ae-kube-api-access-qpj6p\") pod \"must-gather-n52wm\" (UID: \"0cf4f99f-28be-4590-a117-df6a6406f6ae\") " pod="openshift-must-gather-x5wtw/must-gather-n52wm" Jan 22 14:21:28 crc kubenswrapper[5017]: I0122 14:21:28.467318 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/0cf4f99f-28be-4590-a117-df6a6406f6ae-must-gather-output\") pod \"must-gather-n52wm\" (UID: \"0cf4f99f-28be-4590-a117-df6a6406f6ae\") " pod="openshift-must-gather-x5wtw/must-gather-n52wm" Jan 22 14:21:28 crc kubenswrapper[5017]: I0122 14:21:28.569925 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpj6p\" (UniqueName: \"kubernetes.io/projected/0cf4f99f-28be-4590-a117-df6a6406f6ae-kube-api-access-qpj6p\") pod \"must-gather-n52wm\" (UID: \"0cf4f99f-28be-4590-a117-df6a6406f6ae\") " pod="openshift-must-gather-x5wtw/must-gather-n52wm" Jan 22 14:21:28 crc kubenswrapper[5017]: I0122 14:21:28.570001 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/0cf4f99f-28be-4590-a117-df6a6406f6ae-must-gather-output\") pod \"must-gather-n52wm\" (UID: \"0cf4f99f-28be-4590-a117-df6a6406f6ae\") " pod="openshift-must-gather-x5wtw/must-gather-n52wm" Jan 22 14:21:28 crc kubenswrapper[5017]: I0122 14:21:28.570584 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/0cf4f99f-28be-4590-a117-df6a6406f6ae-must-gather-output\") pod \"must-gather-n52wm\" (UID: \"0cf4f99f-28be-4590-a117-df6a6406f6ae\") " pod="openshift-must-gather-x5wtw/must-gather-n52wm" Jan 22 14:21:28 crc kubenswrapper[5017]: I0122 14:21:28.593498 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpj6p\" (UniqueName: \"kubernetes.io/projected/0cf4f99f-28be-4590-a117-df6a6406f6ae-kube-api-access-qpj6p\") pod \"must-gather-n52wm\" (UID: \"0cf4f99f-28be-4590-a117-df6a6406f6ae\") " pod="openshift-must-gather-x5wtw/must-gather-n52wm" Jan 22 14:21:28 crc kubenswrapper[5017]: I0122 14:21:28.690127 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-x5wtw/must-gather-n52wm" Jan 22 14:21:29 crc kubenswrapper[5017]: I0122 14:21:29.065854 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-x5wtw/must-gather-n52wm"] Jan 22 14:21:29 crc kubenswrapper[5017]: I0122 14:21:29.870914 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-x5wtw/must-gather-n52wm" event={"ID":"0cf4f99f-28be-4590-a117-df6a6406f6ae","Type":"ContainerStarted","Data":"c3b928f2ef3bf98ce668d63c467555ca05979710305559b21b6eeabd60ad5e30"} Jan 22 14:21:41 crc kubenswrapper[5017]: I0122 14:21:41.115417 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-x5wtw/must-gather-n52wm" event={"ID":"0cf4f99f-28be-4590-a117-df6a6406f6ae","Type":"ContainerStarted","Data":"544dedf7b54bc25fe43ac760fb621bc13523a211fa8358828a4e25726b2bc54c"} Jan 22 14:21:41 crc kubenswrapper[5017]: I0122 14:21:41.116040 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-x5wtw/must-gather-n52wm" event={"ID":"0cf4f99f-28be-4590-a117-df6a6406f6ae","Type":"ContainerStarted","Data":"58cbd012a9297fac0ef35c596b5b36afa57d49983e28998e78351e28cc64bba6"} Jan 22 14:21:41 crc kubenswrapper[5017]: I0122 14:21:41.137661 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-x5wtw/must-gather-n52wm" podStartSLOduration=2.135032337 podStartE2EDuration="13.137639156s" podCreationTimestamp="2026-01-22 14:21:28 +0000 UTC" firstStartedPulling="2026-01-22 14:21:29.07602521 +0000 UTC m=+1104.068487068" lastFinishedPulling="2026-01-22 14:21:40.078632029 +0000 UTC m=+1115.071093887" observedRunningTime="2026-01-22 14:21:41.13566349 +0000 UTC m=+1116.128125368" watchObservedRunningTime="2026-01-22 14:21:41.137639156 +0000 UTC m=+1116.130101014" Jan 22 14:22:01 crc kubenswrapper[5017]: I0122 14:22:01.053705 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph_401cb975-04a3-4410-9163-2f691a919b6d/ceph/0.log" Jan 22 14:22:14 crc kubenswrapper[5017]: I0122 14:22:14.056939 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p_359429e7-916c-449a-a8da-d9d2ffd88660/util/0.log" Jan 22 14:22:14 crc kubenswrapper[5017]: I0122 14:22:14.264966 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p_359429e7-916c-449a-a8da-d9d2ffd88660/pull/0.log" Jan 22 14:22:14 crc kubenswrapper[5017]: I0122 14:22:14.286302 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p_359429e7-916c-449a-a8da-d9d2ffd88660/util/0.log" Jan 22 14:22:14 crc kubenswrapper[5017]: I0122 14:22:14.317248 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p_359429e7-916c-449a-a8da-d9d2ffd88660/pull/0.log" Jan 22 14:22:14 crc kubenswrapper[5017]: I0122 14:22:14.524919 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p_359429e7-916c-449a-a8da-d9d2ffd88660/util/0.log" Jan 22 14:22:14 crc kubenswrapper[5017]: I0122 14:22:14.535552 5017 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p_359429e7-916c-449a-a8da-d9d2ffd88660/extract/0.log" Jan 22 14:22:14 crc kubenswrapper[5017]: I0122 14:22:14.543102 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7fc5189f3ed7e80c3354d9d7b6d4cbe8a166419e085aad87ace8a207af5rl8p_359429e7-916c-449a-a8da-d9d2ffd88660/pull/0.log" Jan 22 14:22:14 crc kubenswrapper[5017]: I0122 14:22:14.733740 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-9ff957fff-jtx52_5815f460-c9d9-4cde-b4fe-b89ebfa3a395/operator/0.log" Jan 22 14:22:14 crc kubenswrapper[5017]: I0122 14:22:14.745615 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-ndprg_11c90858-0991-400a-b9d1-ae05483386bf/registry-server/0.log" Jan 22 14:22:28 crc kubenswrapper[5017]: I0122 14:22:28.080856 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-5w4zs_90b0ba9d-f818-4059-a895-fe9947511987/control-plane-machine-set-operator/0.log" Jan 22 14:22:28 crc kubenswrapper[5017]: I0122 14:22:28.301006 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-t47kf_1ee65bdd-e09e-4bf1-8025-ec86426073d1/machine-api-operator/0.log" Jan 22 14:22:28 crc kubenswrapper[5017]: I0122 14:22:28.301011 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-t47kf_1ee65bdd-e09e-4bf1-8025-ec86426073d1/kube-rbac-proxy/0.log" Jan 22 14:22:40 crc kubenswrapper[5017]: I0122 14:22:40.884030 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-858654f9db-2bsnh_ae5b9e89-dc5d-4b4d-aac7-c025f72ca407/cert-manager-controller/0.log" Jan 22 14:22:41 crc kubenswrapper[5017]: I0122 14:22:41.091611 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-cf98fcc89-thscs_c134ddb1-8fd5-4e73-bb84-f48e269b59ab/cert-manager-cainjector/0.log" Jan 22 14:22:41 crc kubenswrapper[5017]: I0122 14:22:41.111570 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-687f57d79b-8nlnj_742ae349-26f7-4244-bcc9-3c7c2373a0b4/cert-manager-webhook/0.log" Jan 22 14:22:54 crc kubenswrapper[5017]: I0122 14:22:54.042003 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-9zpdh_83182e42-1f12-414e-a7ec-d4926434cdf1/nmstate-console-plugin/0.log" Jan 22 14:22:54 crc kubenswrapper[5017]: I0122 14:22:54.207966 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-cpqbx_70e8f966-dc89-408d-b174-5076caad75f3/nmstate-handler/0.log" Jan 22 14:22:54 crc kubenswrapper[5017]: I0122 14:22:54.271972 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-wcr2m_ffe16f13-85bc-4a51-9c0b-c350d92075b7/nmstate-metrics/0.log" Jan 22 14:22:54 crc kubenswrapper[5017]: I0122 14:22:54.283476 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-wcr2m_ffe16f13-85bc-4a51-9c0b-c350d92075b7/kube-rbac-proxy/0.log" Jan 22 14:22:54 crc kubenswrapper[5017]: I0122 14:22:54.473319 5017 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-47q9q_01f86f52-94be-4033-bfed-8e7a2caca5f8/nmstate-operator/0.log" Jan 22 14:22:54 crc kubenswrapper[5017]: I0122 14:22:54.485890 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-bgtxx_80b4685d-56a7-44f8-a881-4dfcce29d323/nmstate-webhook/0.log" Jan 22 14:23:21 crc kubenswrapper[5017]: I0122 14:23:21.379490 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-6j6zr_6423026b-4d48-4bac-8a25-3c8fb0d89bbf/kube-rbac-proxy/0.log" Jan 22 14:23:21 crc kubenswrapper[5017]: I0122 14:23:21.485205 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-6j6zr_6423026b-4d48-4bac-8a25-3c8fb0d89bbf/controller/0.log" Jan 22 14:23:21 crc kubenswrapper[5017]: I0122 14:23:21.596230 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p22kj_6b94cba9-db08-403c-acd4-892f04863fa2/cp-frr-files/0.log" Jan 22 14:23:21 crc kubenswrapper[5017]: I0122 14:23:21.842908 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p22kj_6b94cba9-db08-403c-acd4-892f04863fa2/cp-frr-files/0.log" Jan 22 14:23:21 crc kubenswrapper[5017]: I0122 14:23:21.853175 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p22kj_6b94cba9-db08-403c-acd4-892f04863fa2/cp-metrics/0.log" Jan 22 14:23:21 crc kubenswrapper[5017]: I0122 14:23:21.863762 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p22kj_6b94cba9-db08-403c-acd4-892f04863fa2/cp-reloader/0.log" Jan 22 14:23:21 crc kubenswrapper[5017]: I0122 14:23:21.898015 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p22kj_6b94cba9-db08-403c-acd4-892f04863fa2/cp-reloader/0.log" Jan 22 14:23:22 crc kubenswrapper[5017]: I0122 14:23:22.123291 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p22kj_6b94cba9-db08-403c-acd4-892f04863fa2/cp-reloader/0.log" Jan 22 14:23:22 crc kubenswrapper[5017]: I0122 14:23:22.158603 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p22kj_6b94cba9-db08-403c-acd4-892f04863fa2/cp-metrics/0.log" Jan 22 14:23:22 crc kubenswrapper[5017]: I0122 14:23:22.168757 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p22kj_6b94cba9-db08-403c-acd4-892f04863fa2/cp-frr-files/0.log" Jan 22 14:23:22 crc kubenswrapper[5017]: I0122 14:23:22.168946 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p22kj_6b94cba9-db08-403c-acd4-892f04863fa2/cp-metrics/0.log" Jan 22 14:23:22 crc kubenswrapper[5017]: I0122 14:23:22.356216 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p22kj_6b94cba9-db08-403c-acd4-892f04863fa2/cp-frr-files/0.log" Jan 22 14:23:22 crc kubenswrapper[5017]: I0122 14:23:22.368260 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p22kj_6b94cba9-db08-403c-acd4-892f04863fa2/cp-reloader/0.log" Jan 22 14:23:22 crc kubenswrapper[5017]: I0122 14:23:22.396490 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p22kj_6b94cba9-db08-403c-acd4-892f04863fa2/cp-metrics/0.log" Jan 22 14:23:22 crc kubenswrapper[5017]: I0122 14:23:22.480789 5017 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-p22kj_6b94cba9-db08-403c-acd4-892f04863fa2/controller/0.log" Jan 22 14:23:22 crc kubenswrapper[5017]: I0122 14:23:22.659674 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p22kj_6b94cba9-db08-403c-acd4-892f04863fa2/frr-metrics/0.log" Jan 22 14:23:22 crc kubenswrapper[5017]: I0122 14:23:22.718342 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p22kj_6b94cba9-db08-403c-acd4-892f04863fa2/frr/0.log" Jan 22 14:23:22 crc kubenswrapper[5017]: I0122 14:23:22.771423 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p22kj_6b94cba9-db08-403c-acd4-892f04863fa2/kube-rbac-proxy/0.log" Jan 22 14:23:22 crc kubenswrapper[5017]: I0122 14:23:22.784045 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p22kj_6b94cba9-db08-403c-acd4-892f04863fa2/kube-rbac-proxy-frr/0.log" Jan 22 14:23:22 crc kubenswrapper[5017]: I0122 14:23:22.883695 5017 patch_prober.go:28] interesting pod/machine-config-daemon-cr62h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:23:22 crc kubenswrapper[5017]: I0122 14:23:22.883760 5017 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:23:22 crc kubenswrapper[5017]: I0122 14:23:22.917566 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p22kj_6b94cba9-db08-403c-acd4-892f04863fa2/reloader/0.log" Jan 22 14:23:23 crc kubenswrapper[5017]: I0122 14:23:23.014438 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-tc22d_d9d4ff3a-c59f-4b96-a00e-832089e6b4f9/frr-k8s-webhook-server/0.log" Jan 22 14:23:23 crc kubenswrapper[5017]: I0122 14:23:23.125539 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5579759f95-242br_ce374248-2941-4733-9baa-7b071a8a4e51/manager/0.log" Jan 22 14:23:23 crc kubenswrapper[5017]: I0122 14:23:23.238417 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-f75b486cd-q4zdm_75971588-d1b6-4f3f-982f-2137be914a8f/webhook-server/0.log" Jan 22 14:23:23 crc kubenswrapper[5017]: I0122 14:23:23.390139 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-xsq75_44f57db2-6ae9-42c4-bf1e-12a301657172/kube-rbac-proxy/0.log" Jan 22 14:23:23 crc kubenswrapper[5017]: I0122 14:23:23.546445 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-xsq75_44f57db2-6ae9-42c4-bf1e-12a301657172/speaker/0.log" Jan 22 14:23:37 crc kubenswrapper[5017]: I0122 14:23:37.307218 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v_e5ffedf9-b8ab-434a-aaf7-bc816bec0de0/util/0.log" Jan 22 14:23:37 crc kubenswrapper[5017]: I0122 14:23:37.445289 5017 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v_e5ffedf9-b8ab-434a-aaf7-bc816bec0de0/util/0.log" Jan 22 14:23:37 crc kubenswrapper[5017]: I0122 14:23:37.460104 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v_e5ffedf9-b8ab-434a-aaf7-bc816bec0de0/pull/0.log" Jan 22 14:23:37 crc kubenswrapper[5017]: I0122 14:23:37.485431 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v_e5ffedf9-b8ab-434a-aaf7-bc816bec0de0/pull/0.log" Jan 22 14:23:37 crc kubenswrapper[5017]: I0122 14:23:37.707233 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v_e5ffedf9-b8ab-434a-aaf7-bc816bec0de0/pull/0.log" Jan 22 14:23:37 crc kubenswrapper[5017]: I0122 14:23:37.717033 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v_e5ffedf9-b8ab-434a-aaf7-bc816bec0de0/extract/0.log" Jan 22 14:23:37 crc kubenswrapper[5017]: I0122 14:23:37.737583 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc6vv6v_e5ffedf9-b8ab-434a-aaf7-bc816bec0de0/util/0.log" Jan 22 14:23:37 crc kubenswrapper[5017]: I0122 14:23:37.907209 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh_870fa842-bac0-4b1f-9cc1-ababf261a783/util/0.log" Jan 22 14:23:38 crc kubenswrapper[5017]: I0122 14:23:38.138824 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh_870fa842-bac0-4b1f-9cc1-ababf261a783/util/0.log" Jan 22 14:23:38 crc kubenswrapper[5017]: I0122 14:23:38.170170 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh_870fa842-bac0-4b1f-9cc1-ababf261a783/pull/0.log" Jan 22 14:23:38 crc kubenswrapper[5017]: I0122 14:23:38.170678 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh_870fa842-bac0-4b1f-9cc1-ababf261a783/pull/0.log" Jan 22 14:23:38 crc kubenswrapper[5017]: I0122 14:23:38.394123 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh_870fa842-bac0-4b1f-9cc1-ababf261a783/pull/0.log" Jan 22 14:23:38 crc kubenswrapper[5017]: I0122 14:23:38.441917 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh_870fa842-bac0-4b1f-9cc1-ababf261a783/util/0.log" Jan 22 14:23:38 crc kubenswrapper[5017]: I0122 14:23:38.502295 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713v9knh_870fa842-bac0-4b1f-9cc1-ababf261a783/extract/0.log" Jan 22 14:23:38 crc kubenswrapper[5017]: I0122 14:23:38.630537 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8jlwk_9a572a90-9417-4bac-a648-227d3f892664/extract-utilities/0.log" Jan 22 
14:23:38 crc kubenswrapper[5017]: I0122 14:23:38.802339 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8jlwk_9a572a90-9417-4bac-a648-227d3f892664/extract-utilities/0.log" Jan 22 14:23:38 crc kubenswrapper[5017]: I0122 14:23:38.849340 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8jlwk_9a572a90-9417-4bac-a648-227d3f892664/extract-content/0.log" Jan 22 14:23:38 crc kubenswrapper[5017]: I0122 14:23:38.949586 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8jlwk_9a572a90-9417-4bac-a648-227d3f892664/extract-content/0.log" Jan 22 14:23:39 crc kubenswrapper[5017]: I0122 14:23:39.154625 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8jlwk_9a572a90-9417-4bac-a648-227d3f892664/extract-content/0.log" Jan 22 14:23:39 crc kubenswrapper[5017]: I0122 14:23:39.196890 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8jlwk_9a572a90-9417-4bac-a648-227d3f892664/extract-utilities/0.log" Jan 22 14:23:39 crc kubenswrapper[5017]: I0122 14:23:39.390270 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8jlwk_9a572a90-9417-4bac-a648-227d3f892664/registry-server/0.log" Jan 22 14:23:39 crc kubenswrapper[5017]: I0122 14:23:39.448549 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-ghrs9_24e6cfb8-c40c-4b03-873e-6d6ab15f1911/extract-utilities/0.log" Jan 22 14:23:39 crc kubenswrapper[5017]: I0122 14:23:39.611716 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-ghrs9_24e6cfb8-c40c-4b03-873e-6d6ab15f1911/extract-utilities/0.log" Jan 22 14:23:39 crc kubenswrapper[5017]: I0122 14:23:39.665221 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-ghrs9_24e6cfb8-c40c-4b03-873e-6d6ab15f1911/extract-content/0.log" Jan 22 14:23:39 crc kubenswrapper[5017]: I0122 14:23:39.722742 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-ghrs9_24e6cfb8-c40c-4b03-873e-6d6ab15f1911/extract-content/0.log" Jan 22 14:23:39 crc kubenswrapper[5017]: I0122 14:23:39.851550 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-ghrs9_24e6cfb8-c40c-4b03-873e-6d6ab15f1911/extract-content/0.log" Jan 22 14:23:39 crc kubenswrapper[5017]: I0122 14:23:39.969765 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-ghrs9_24e6cfb8-c40c-4b03-873e-6d6ab15f1911/extract-utilities/0.log" Jan 22 14:23:40 crc kubenswrapper[5017]: I0122 14:23:40.096304 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-5fttm_63cce9b3-3f85-4447-8f12-77f4b35c9d6f/marketplace-operator/0.log" Jan 22 14:23:40 crc kubenswrapper[5017]: I0122 14:23:40.141303 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-ghrs9_24e6cfb8-c40c-4b03-873e-6d6ab15f1911/registry-server/0.log" Jan 22 14:23:40 crc kubenswrapper[5017]: I0122 14:23:40.256815 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6hplb_cfba557d-e1ff-4acf-b373-1ac74e4754ee/extract-utilities/0.log" Jan 22 
14:23:40 crc kubenswrapper[5017]: I0122 14:23:40.433201 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6hplb_cfba557d-e1ff-4acf-b373-1ac74e4754ee/extract-content/0.log" Jan 22 14:23:40 crc kubenswrapper[5017]: I0122 14:23:40.455188 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6hplb_cfba557d-e1ff-4acf-b373-1ac74e4754ee/extract-content/0.log" Jan 22 14:23:40 crc kubenswrapper[5017]: I0122 14:23:40.471593 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6hplb_cfba557d-e1ff-4acf-b373-1ac74e4754ee/extract-utilities/0.log" Jan 22 14:23:40 crc kubenswrapper[5017]: I0122 14:23:40.649028 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6hplb_cfba557d-e1ff-4acf-b373-1ac74e4754ee/extract-utilities/0.log" Jan 22 14:23:40 crc kubenswrapper[5017]: I0122 14:23:40.653986 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6hplb_cfba557d-e1ff-4acf-b373-1ac74e4754ee/extract-content/0.log" Jan 22 14:23:40 crc kubenswrapper[5017]: I0122 14:23:40.713874 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6hplb_cfba557d-e1ff-4acf-b373-1ac74e4754ee/registry-server/0.log" Jan 22 14:23:40 crc kubenswrapper[5017]: I0122 14:23:40.871934 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wd96l_0742b6c5-1b21-4e65-a1f2-1f50d1751e81/extract-utilities/0.log" Jan 22 14:23:41 crc kubenswrapper[5017]: I0122 14:23:41.047703 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wd96l_0742b6c5-1b21-4e65-a1f2-1f50d1751e81/extract-utilities/0.log" Jan 22 14:23:41 crc kubenswrapper[5017]: I0122 14:23:41.056665 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wd96l_0742b6c5-1b21-4e65-a1f2-1f50d1751e81/extract-content/0.log" Jan 22 14:23:41 crc kubenswrapper[5017]: I0122 14:23:41.098093 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wd96l_0742b6c5-1b21-4e65-a1f2-1f50d1751e81/extract-content/0.log" Jan 22 14:23:41 crc kubenswrapper[5017]: I0122 14:23:41.237150 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wd96l_0742b6c5-1b21-4e65-a1f2-1f50d1751e81/extract-utilities/0.log" Jan 22 14:23:41 crc kubenswrapper[5017]: I0122 14:23:41.298890 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wd96l_0742b6c5-1b21-4e65-a1f2-1f50d1751e81/extract-content/0.log" Jan 22 14:23:41 crc kubenswrapper[5017]: I0122 14:23:41.585121 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wd96l_0742b6c5-1b21-4e65-a1f2-1f50d1751e81/registry-server/0.log" Jan 22 14:23:52 crc kubenswrapper[5017]: I0122 14:23:52.883787 5017 patch_prober.go:28] interesting pod/machine-config-daemon-cr62h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:23:52 crc kubenswrapper[5017]: I0122 14:23:52.884510 5017 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:24:22 crc kubenswrapper[5017]: I0122 14:24:22.884401 5017 patch_prober.go:28] interesting pod/machine-config-daemon-cr62h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:24:22 crc kubenswrapper[5017]: I0122 14:24:22.885449 5017 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:24:22 crc kubenswrapper[5017]: I0122 14:24:22.885527 5017 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" Jan 22 14:24:22 crc kubenswrapper[5017]: I0122 14:24:22.886484 5017 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"782e38edfb21bed0b225c18b4748d2c32ff4bf0b56845bdb1db63cb7f0114015"} pod="openshift-machine-config-operator/machine-config-daemon-cr62h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 14:24:22 crc kubenswrapper[5017]: I0122 14:24:22.886552 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" containerID="cri-o://782e38edfb21bed0b225c18b4748d2c32ff4bf0b56845bdb1db63cb7f0114015" gracePeriod=600 Jan 22 14:24:23 crc kubenswrapper[5017]: I0122 14:24:23.260854 5017 generic.go:334] "Generic (PLEG): container finished" podID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerID="782e38edfb21bed0b225c18b4748d2c32ff4bf0b56845bdb1db63cb7f0114015" exitCode=0 Jan 22 14:24:23 crc kubenswrapper[5017]: I0122 14:24:23.264881 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" event={"ID":"3f43f877-4d1f-4041-af1f-39a962f7ac0d","Type":"ContainerDied","Data":"782e38edfb21bed0b225c18b4748d2c32ff4bf0b56845bdb1db63cb7f0114015"} Jan 22 14:24:23 crc kubenswrapper[5017]: I0122 14:24:23.264943 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" event={"ID":"3f43f877-4d1f-4041-af1f-39a962f7ac0d","Type":"ContainerStarted","Data":"3987c105edaf2e5dad511c09d18f4279286d0985adfa938d3d148eb5e22a8796"} Jan 22 14:24:23 crc kubenswrapper[5017]: I0122 14:24:23.264968 5017 scope.go:117] "RemoveContainer" containerID="1d27c3a5c58b1e6c68ef017f8d5adf66a05dff18c5c08e0c60bb0b5a99e32c2a" Jan 22 14:25:00 crc kubenswrapper[5017]: I0122 14:25:00.560416 5017 generic.go:334] "Generic (PLEG): container finished" podID="0cf4f99f-28be-4590-a117-df6a6406f6ae" containerID="58cbd012a9297fac0ef35c596b5b36afa57d49983e28998e78351e28cc64bba6" exitCode=0 Jan 22 14:25:00 crc kubenswrapper[5017]: I0122 14:25:00.560569 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-must-gather-x5wtw/must-gather-n52wm" event={"ID":"0cf4f99f-28be-4590-a117-df6a6406f6ae","Type":"ContainerDied","Data":"58cbd012a9297fac0ef35c596b5b36afa57d49983e28998e78351e28cc64bba6"} Jan 22 14:25:00 crc kubenswrapper[5017]: I0122 14:25:00.562634 5017 scope.go:117] "RemoveContainer" containerID="58cbd012a9297fac0ef35c596b5b36afa57d49983e28998e78351e28cc64bba6" Jan 22 14:25:00 crc kubenswrapper[5017]: I0122 14:25:00.699167 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-x5wtw_must-gather-n52wm_0cf4f99f-28be-4590-a117-df6a6406f6ae/gather/0.log" Jan 22 14:25:07 crc kubenswrapper[5017]: I0122 14:25:07.927913 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-x5wtw/must-gather-n52wm"] Jan 22 14:25:07 crc kubenswrapper[5017]: I0122 14:25:07.929098 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-x5wtw/must-gather-n52wm" podUID="0cf4f99f-28be-4590-a117-df6a6406f6ae" containerName="copy" containerID="cri-o://544dedf7b54bc25fe43ac760fb621bc13523a211fa8358828a4e25726b2bc54c" gracePeriod=2 Jan 22 14:25:07 crc kubenswrapper[5017]: I0122 14:25:07.941771 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-x5wtw/must-gather-n52wm"] Jan 22 14:25:08 crc kubenswrapper[5017]: I0122 14:25:08.377849 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-x5wtw_must-gather-n52wm_0cf4f99f-28be-4590-a117-df6a6406f6ae/copy/0.log" Jan 22 14:25:08 crc kubenswrapper[5017]: I0122 14:25:08.378918 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-x5wtw/must-gather-n52wm" Jan 22 14:25:08 crc kubenswrapper[5017]: I0122 14:25:08.545519 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/0cf4f99f-28be-4590-a117-df6a6406f6ae-must-gather-output\") pod \"0cf4f99f-28be-4590-a117-df6a6406f6ae\" (UID: \"0cf4f99f-28be-4590-a117-df6a6406f6ae\") " Jan 22 14:25:08 crc kubenswrapper[5017]: I0122 14:25:08.545707 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qpj6p\" (UniqueName: \"kubernetes.io/projected/0cf4f99f-28be-4590-a117-df6a6406f6ae-kube-api-access-qpj6p\") pod \"0cf4f99f-28be-4590-a117-df6a6406f6ae\" (UID: \"0cf4f99f-28be-4590-a117-df6a6406f6ae\") " Jan 22 14:25:08 crc kubenswrapper[5017]: I0122 14:25:08.556798 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cf4f99f-28be-4590-a117-df6a6406f6ae-kube-api-access-qpj6p" (OuterVolumeSpecName: "kube-api-access-qpj6p") pod "0cf4f99f-28be-4590-a117-df6a6406f6ae" (UID: "0cf4f99f-28be-4590-a117-df6a6406f6ae"). InnerVolumeSpecName "kube-api-access-qpj6p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:25:08 crc kubenswrapper[5017]: I0122 14:25:08.622628 5017 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-x5wtw_must-gather-n52wm_0cf4f99f-28be-4590-a117-df6a6406f6ae/copy/0.log" Jan 22 14:25:08 crc kubenswrapper[5017]: I0122 14:25:08.623287 5017 generic.go:334] "Generic (PLEG): container finished" podID="0cf4f99f-28be-4590-a117-df6a6406f6ae" containerID="544dedf7b54bc25fe43ac760fb621bc13523a211fa8358828a4e25726b2bc54c" exitCode=143 Jan 22 14:25:08 crc kubenswrapper[5017]: I0122 14:25:08.623387 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-x5wtw/must-gather-n52wm" Jan 22 14:25:08 crc kubenswrapper[5017]: I0122 14:25:08.623456 5017 scope.go:117] "RemoveContainer" containerID="544dedf7b54bc25fe43ac760fb621bc13523a211fa8358828a4e25726b2bc54c" Jan 22 14:25:08 crc kubenswrapper[5017]: I0122 14:25:08.630428 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cf4f99f-28be-4590-a117-df6a6406f6ae-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "0cf4f99f-28be-4590-a117-df6a6406f6ae" (UID: "0cf4f99f-28be-4590-a117-df6a6406f6ae"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:25:08 crc kubenswrapper[5017]: I0122 14:25:08.640557 5017 scope.go:117] "RemoveContainer" containerID="58cbd012a9297fac0ef35c596b5b36afa57d49983e28998e78351e28cc64bba6" Jan 22 14:25:08 crc kubenswrapper[5017]: I0122 14:25:08.647363 5017 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/0cf4f99f-28be-4590-a117-df6a6406f6ae-must-gather-output\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:08 crc kubenswrapper[5017]: I0122 14:25:08.647401 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qpj6p\" (UniqueName: \"kubernetes.io/projected/0cf4f99f-28be-4590-a117-df6a6406f6ae-kube-api-access-qpj6p\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:08 crc kubenswrapper[5017]: I0122 14:25:08.677762 5017 scope.go:117] "RemoveContainer" containerID="544dedf7b54bc25fe43ac760fb621bc13523a211fa8358828a4e25726b2bc54c" Jan 22 14:25:08 crc kubenswrapper[5017]: E0122 14:25:08.678351 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"544dedf7b54bc25fe43ac760fb621bc13523a211fa8358828a4e25726b2bc54c\": container with ID starting with 544dedf7b54bc25fe43ac760fb621bc13523a211fa8358828a4e25726b2bc54c not found: ID does not exist" containerID="544dedf7b54bc25fe43ac760fb621bc13523a211fa8358828a4e25726b2bc54c" Jan 22 14:25:08 crc kubenswrapper[5017]: I0122 14:25:08.678396 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"544dedf7b54bc25fe43ac760fb621bc13523a211fa8358828a4e25726b2bc54c"} err="failed to get container status \"544dedf7b54bc25fe43ac760fb621bc13523a211fa8358828a4e25726b2bc54c\": rpc error: code = NotFound desc = could not find container \"544dedf7b54bc25fe43ac760fb621bc13523a211fa8358828a4e25726b2bc54c\": container with ID starting with 544dedf7b54bc25fe43ac760fb621bc13523a211fa8358828a4e25726b2bc54c not found: ID does not exist" Jan 22 14:25:08 crc kubenswrapper[5017]: I0122 14:25:08.678428 5017 scope.go:117] "RemoveContainer" containerID="58cbd012a9297fac0ef35c596b5b36afa57d49983e28998e78351e28cc64bba6" Jan 22 14:25:08 crc kubenswrapper[5017]: E0122 14:25:08.678983 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58cbd012a9297fac0ef35c596b5b36afa57d49983e28998e78351e28cc64bba6\": container with ID starting with 58cbd012a9297fac0ef35c596b5b36afa57d49983e28998e78351e28cc64bba6 not found: ID does not exist" containerID="58cbd012a9297fac0ef35c596b5b36afa57d49983e28998e78351e28cc64bba6" Jan 22 14:25:08 crc kubenswrapper[5017]: I0122 14:25:08.679066 5017 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"58cbd012a9297fac0ef35c596b5b36afa57d49983e28998e78351e28cc64bba6"} err="failed to get container status \"58cbd012a9297fac0ef35c596b5b36afa57d49983e28998e78351e28cc64bba6\": rpc error: code = NotFound desc = could not find container \"58cbd012a9297fac0ef35c596b5b36afa57d49983e28998e78351e28cc64bba6\": container with ID starting with 58cbd012a9297fac0ef35c596b5b36afa57d49983e28998e78351e28cc64bba6 not found: ID does not exist" Jan 22 14:25:09 crc kubenswrapper[5017]: I0122 14:25:09.269432 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0cf4f99f-28be-4590-a117-df6a6406f6ae" path="/var/lib/kubelet/pods/0cf4f99f-28be-4590-a117-df6a6406f6ae/volumes" Jan 22 14:26:52 crc kubenswrapper[5017]: I0122 14:26:52.883752 5017 patch_prober.go:28] interesting pod/machine-config-daemon-cr62h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:26:52 crc kubenswrapper[5017]: I0122 14:26:52.884808 5017 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:27:22 crc kubenswrapper[5017]: I0122 14:27:22.883940 5017 patch_prober.go:28] interesting pod/machine-config-daemon-cr62h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:27:22 crc kubenswrapper[5017]: I0122 14:27:22.884817 5017 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:27:37 crc kubenswrapper[5017]: I0122 14:27:37.269854 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cgm67"] Jan 22 14:27:37 crc kubenswrapper[5017]: E0122 14:27:37.270750 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cf4f99f-28be-4590-a117-df6a6406f6ae" containerName="gather" Jan 22 14:27:37 crc kubenswrapper[5017]: I0122 14:27:37.270764 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cf4f99f-28be-4590-a117-df6a6406f6ae" containerName="gather" Jan 22 14:27:37 crc kubenswrapper[5017]: E0122 14:27:37.270780 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cf4f99f-28be-4590-a117-df6a6406f6ae" containerName="copy" Jan 22 14:27:37 crc kubenswrapper[5017]: I0122 14:27:37.270786 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cf4f99f-28be-4590-a117-df6a6406f6ae" containerName="copy" Jan 22 14:27:37 crc kubenswrapper[5017]: I0122 14:27:37.270927 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cf4f99f-28be-4590-a117-df6a6406f6ae" containerName="copy" Jan 22 14:27:37 crc kubenswrapper[5017]: I0122 14:27:37.270941 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cf4f99f-28be-4590-a117-df6a6406f6ae" containerName="gather" Jan 22 14:27:37 crc kubenswrapper[5017]: 
I0122 14:27:37.271926 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cgm67" Jan 22 14:27:37 crc kubenswrapper[5017]: I0122 14:27:37.289597 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cgm67"] Jan 22 14:27:37 crc kubenswrapper[5017]: I0122 14:27:37.312317 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2h4g5\" (UniqueName: \"kubernetes.io/projected/b46ade51-e5a9-4ead-9354-4f43cdcef7f0-kube-api-access-2h4g5\") pod \"redhat-operators-cgm67\" (UID: \"b46ade51-e5a9-4ead-9354-4f43cdcef7f0\") " pod="openshift-marketplace/redhat-operators-cgm67" Jan 22 14:27:37 crc kubenswrapper[5017]: I0122 14:27:37.312391 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b46ade51-e5a9-4ead-9354-4f43cdcef7f0-utilities\") pod \"redhat-operators-cgm67\" (UID: \"b46ade51-e5a9-4ead-9354-4f43cdcef7f0\") " pod="openshift-marketplace/redhat-operators-cgm67" Jan 22 14:27:37 crc kubenswrapper[5017]: I0122 14:27:37.312423 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b46ade51-e5a9-4ead-9354-4f43cdcef7f0-catalog-content\") pod \"redhat-operators-cgm67\" (UID: \"b46ade51-e5a9-4ead-9354-4f43cdcef7f0\") " pod="openshift-marketplace/redhat-operators-cgm67" Jan 22 14:27:37 crc kubenswrapper[5017]: I0122 14:27:37.413997 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b46ade51-e5a9-4ead-9354-4f43cdcef7f0-utilities\") pod \"redhat-operators-cgm67\" (UID: \"b46ade51-e5a9-4ead-9354-4f43cdcef7f0\") " pod="openshift-marketplace/redhat-operators-cgm67" Jan 22 14:27:37 crc kubenswrapper[5017]: I0122 14:27:37.414068 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b46ade51-e5a9-4ead-9354-4f43cdcef7f0-catalog-content\") pod \"redhat-operators-cgm67\" (UID: \"b46ade51-e5a9-4ead-9354-4f43cdcef7f0\") " pod="openshift-marketplace/redhat-operators-cgm67" Jan 22 14:27:37 crc kubenswrapper[5017]: I0122 14:27:37.414149 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2h4g5\" (UniqueName: \"kubernetes.io/projected/b46ade51-e5a9-4ead-9354-4f43cdcef7f0-kube-api-access-2h4g5\") pod \"redhat-operators-cgm67\" (UID: \"b46ade51-e5a9-4ead-9354-4f43cdcef7f0\") " pod="openshift-marketplace/redhat-operators-cgm67" Jan 22 14:27:37 crc kubenswrapper[5017]: I0122 14:27:37.415011 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b46ade51-e5a9-4ead-9354-4f43cdcef7f0-utilities\") pod \"redhat-operators-cgm67\" (UID: \"b46ade51-e5a9-4ead-9354-4f43cdcef7f0\") " pod="openshift-marketplace/redhat-operators-cgm67" Jan 22 14:27:37 crc kubenswrapper[5017]: I0122 14:27:37.415242 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b46ade51-e5a9-4ead-9354-4f43cdcef7f0-catalog-content\") pod \"redhat-operators-cgm67\" (UID: \"b46ade51-e5a9-4ead-9354-4f43cdcef7f0\") " pod="openshift-marketplace/redhat-operators-cgm67" Jan 22 14:27:37 crc kubenswrapper[5017]: I0122 14:27:37.442318 5017 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2h4g5\" (UniqueName: \"kubernetes.io/projected/b46ade51-e5a9-4ead-9354-4f43cdcef7f0-kube-api-access-2h4g5\") pod \"redhat-operators-cgm67\" (UID: \"b46ade51-e5a9-4ead-9354-4f43cdcef7f0\") " pod="openshift-marketplace/redhat-operators-cgm67" Jan 22 14:27:37 crc kubenswrapper[5017]: I0122 14:27:37.593707 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cgm67" Jan 22 14:27:37 crc kubenswrapper[5017]: I0122 14:27:37.851223 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cgm67"] Jan 22 14:27:37 crc kubenswrapper[5017]: I0122 14:27:37.880179 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgm67" event={"ID":"b46ade51-e5a9-4ead-9354-4f43cdcef7f0","Type":"ContainerStarted","Data":"464d00345003c981167ee3747e85cdcef4cbee488dae8db6d8790ed4eb112e19"} Jan 22 14:27:38 crc kubenswrapper[5017]: I0122 14:27:38.890907 5017 generic.go:334] "Generic (PLEG): container finished" podID="b46ade51-e5a9-4ead-9354-4f43cdcef7f0" containerID="b3525e4ee365124195d25f9b90a0597e955c8a099a30d1117153bc74fa486922" exitCode=0 Jan 22 14:27:38 crc kubenswrapper[5017]: I0122 14:27:38.891057 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgm67" event={"ID":"b46ade51-e5a9-4ead-9354-4f43cdcef7f0","Type":"ContainerDied","Data":"b3525e4ee365124195d25f9b90a0597e955c8a099a30d1117153bc74fa486922"} Jan 22 14:27:38 crc kubenswrapper[5017]: I0122 14:27:38.894180 5017 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 14:27:40 crc kubenswrapper[5017]: I0122 14:27:40.909871 5017 generic.go:334] "Generic (PLEG): container finished" podID="b46ade51-e5a9-4ead-9354-4f43cdcef7f0" containerID="6e4212bd4a5323a21d2235cd8ecc0b767255316dd49b953bf028f4f33fdb3288" exitCode=0 Jan 22 14:27:40 crc kubenswrapper[5017]: I0122 14:27:40.910015 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgm67" event={"ID":"b46ade51-e5a9-4ead-9354-4f43cdcef7f0","Type":"ContainerDied","Data":"6e4212bd4a5323a21d2235cd8ecc0b767255316dd49b953bf028f4f33fdb3288"} Jan 22 14:27:41 crc kubenswrapper[5017]: I0122 14:27:41.927808 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgm67" event={"ID":"b46ade51-e5a9-4ead-9354-4f43cdcef7f0","Type":"ContainerStarted","Data":"01f369eee28667126f0a60312056ae8fb21a4e3423f972c4297960e0c0852638"} Jan 22 14:27:41 crc kubenswrapper[5017]: I0122 14:27:41.972564 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cgm67" podStartSLOduration=2.558270827 podStartE2EDuration="4.972528129s" podCreationTimestamp="2026-01-22 14:27:37 +0000 UTC" firstStartedPulling="2026-01-22 14:27:38.893614249 +0000 UTC m=+1473.886076127" lastFinishedPulling="2026-01-22 14:27:41.307871531 +0000 UTC m=+1476.300333429" observedRunningTime="2026-01-22 14:27:41.958526843 +0000 UTC m=+1476.950988731" watchObservedRunningTime="2026-01-22 14:27:41.972528129 +0000 UTC m=+1476.964989997" Jan 22 14:27:47 crc kubenswrapper[5017]: I0122 14:27:47.594133 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cgm67" Jan 22 14:27:47 crc kubenswrapper[5017]: I0122 14:27:47.596250 5017 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cgm67" Jan 22 14:27:48 crc kubenswrapper[5017]: I0122 14:27:48.637373 5017 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-cgm67" podUID="b46ade51-e5a9-4ead-9354-4f43cdcef7f0" containerName="registry-server" probeResult="failure" output=< Jan 22 14:27:48 crc kubenswrapper[5017]: timeout: failed to connect service ":50051" within 1s Jan 22 14:27:48 crc kubenswrapper[5017]: > Jan 22 14:27:52 crc kubenswrapper[5017]: I0122 14:27:52.884059 5017 patch_prober.go:28] interesting pod/machine-config-daemon-cr62h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:27:52 crc kubenswrapper[5017]: I0122 14:27:52.885005 5017 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:27:52 crc kubenswrapper[5017]: I0122 14:27:52.885067 5017 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" Jan 22 14:27:52 crc kubenswrapper[5017]: I0122 14:27:52.885925 5017 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3987c105edaf2e5dad511c09d18f4279286d0985adfa938d3d148eb5e22a8796"} pod="openshift-machine-config-operator/machine-config-daemon-cr62h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 14:27:52 crc kubenswrapper[5017]: I0122 14:27:52.885985 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" podUID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerName="machine-config-daemon" containerID="cri-o://3987c105edaf2e5dad511c09d18f4279286d0985adfa938d3d148eb5e22a8796" gracePeriod=600 Jan 22 14:27:54 crc kubenswrapper[5017]: I0122 14:27:54.026903 5017 generic.go:334] "Generic (PLEG): container finished" podID="3f43f877-4d1f-4041-af1f-39a962f7ac0d" containerID="3987c105edaf2e5dad511c09d18f4279286d0985adfa938d3d148eb5e22a8796" exitCode=0 Jan 22 14:27:54 crc kubenswrapper[5017]: I0122 14:27:54.026977 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" event={"ID":"3f43f877-4d1f-4041-af1f-39a962f7ac0d","Type":"ContainerDied","Data":"3987c105edaf2e5dad511c09d18f4279286d0985adfa938d3d148eb5e22a8796"} Jan 22 14:27:54 crc kubenswrapper[5017]: I0122 14:27:54.027801 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-cr62h" event={"ID":"3f43f877-4d1f-4041-af1f-39a962f7ac0d","Type":"ContainerStarted","Data":"2caee8e839a229b5e093e1eaf59625ba55c09837b52c065bcaf2384650f7ede9"} Jan 22 14:27:54 crc kubenswrapper[5017]: I0122 14:27:54.027834 5017 scope.go:117] "RemoveContainer" containerID="782e38edfb21bed0b225c18b4748d2c32ff4bf0b56845bdb1db63cb7f0114015" Jan 22 14:27:57 crc kubenswrapper[5017]: I0122 14:27:57.643832 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-operators-cgm67" Jan 22 14:27:57 crc kubenswrapper[5017]: I0122 14:27:57.694019 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cgm67" Jan 22 14:27:57 crc kubenswrapper[5017]: I0122 14:27:57.888679 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cgm67"] Jan 22 14:27:59 crc kubenswrapper[5017]: I0122 14:27:59.482973 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cgm67" podUID="b46ade51-e5a9-4ead-9354-4f43cdcef7f0" containerName="registry-server" containerID="cri-o://01f369eee28667126f0a60312056ae8fb21a4e3423f972c4297960e0c0852638" gracePeriod=2 Jan 22 14:27:59 crc kubenswrapper[5017]: I0122 14:27:59.937545 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cgm67" Jan 22 14:27:59 crc kubenswrapper[5017]: I0122 14:27:59.996767 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b46ade51-e5a9-4ead-9354-4f43cdcef7f0-catalog-content\") pod \"b46ade51-e5a9-4ead-9354-4f43cdcef7f0\" (UID: \"b46ade51-e5a9-4ead-9354-4f43cdcef7f0\") " Jan 22 14:27:59 crc kubenswrapper[5017]: I0122 14:27:59.996949 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b46ade51-e5a9-4ead-9354-4f43cdcef7f0-utilities\") pod \"b46ade51-e5a9-4ead-9354-4f43cdcef7f0\" (UID: \"b46ade51-e5a9-4ead-9354-4f43cdcef7f0\") " Jan 22 14:27:59 crc kubenswrapper[5017]: I0122 14:27:59.997175 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2h4g5\" (UniqueName: \"kubernetes.io/projected/b46ade51-e5a9-4ead-9354-4f43cdcef7f0-kube-api-access-2h4g5\") pod \"b46ade51-e5a9-4ead-9354-4f43cdcef7f0\" (UID: \"b46ade51-e5a9-4ead-9354-4f43cdcef7f0\") " Jan 22 14:27:59 crc kubenswrapper[5017]: I0122 14:27:59.999627 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b46ade51-e5a9-4ead-9354-4f43cdcef7f0-utilities" (OuterVolumeSpecName: "utilities") pod "b46ade51-e5a9-4ead-9354-4f43cdcef7f0" (UID: "b46ade51-e5a9-4ead-9354-4f43cdcef7f0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:28:00 crc kubenswrapper[5017]: I0122 14:28:00.010462 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b46ade51-e5a9-4ead-9354-4f43cdcef7f0-kube-api-access-2h4g5" (OuterVolumeSpecName: "kube-api-access-2h4g5") pod "b46ade51-e5a9-4ead-9354-4f43cdcef7f0" (UID: "b46ade51-e5a9-4ead-9354-4f43cdcef7f0"). InnerVolumeSpecName "kube-api-access-2h4g5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:28:00 crc kubenswrapper[5017]: I0122 14:28:00.099106 5017 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b46ade51-e5a9-4ead-9354-4f43cdcef7f0-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:28:00 crc kubenswrapper[5017]: I0122 14:28:00.099203 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2h4g5\" (UniqueName: \"kubernetes.io/projected/b46ade51-e5a9-4ead-9354-4f43cdcef7f0-kube-api-access-2h4g5\") on node \"crc\" DevicePath \"\"" Jan 22 14:28:00 crc kubenswrapper[5017]: I0122 14:28:00.135683 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b46ade51-e5a9-4ead-9354-4f43cdcef7f0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b46ade51-e5a9-4ead-9354-4f43cdcef7f0" (UID: "b46ade51-e5a9-4ead-9354-4f43cdcef7f0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:28:00 crc kubenswrapper[5017]: I0122 14:28:00.201350 5017 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b46ade51-e5a9-4ead-9354-4f43cdcef7f0-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:28:00 crc kubenswrapper[5017]: I0122 14:28:00.494719 5017 generic.go:334] "Generic (PLEG): container finished" podID="b46ade51-e5a9-4ead-9354-4f43cdcef7f0" containerID="01f369eee28667126f0a60312056ae8fb21a4e3423f972c4297960e0c0852638" exitCode=0 Jan 22 14:28:00 crc kubenswrapper[5017]: I0122 14:28:00.494804 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgm67" event={"ID":"b46ade51-e5a9-4ead-9354-4f43cdcef7f0","Type":"ContainerDied","Data":"01f369eee28667126f0a60312056ae8fb21a4e3423f972c4297960e0c0852638"} Jan 22 14:28:00 crc kubenswrapper[5017]: I0122 14:28:00.494856 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgm67" event={"ID":"b46ade51-e5a9-4ead-9354-4f43cdcef7f0","Type":"ContainerDied","Data":"464d00345003c981167ee3747e85cdcef4cbee488dae8db6d8790ed4eb112e19"} Jan 22 14:28:00 crc kubenswrapper[5017]: I0122 14:28:00.494888 5017 scope.go:117] "RemoveContainer" containerID="01f369eee28667126f0a60312056ae8fb21a4e3423f972c4297960e0c0852638" Jan 22 14:28:00 crc kubenswrapper[5017]: I0122 14:28:00.495230 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cgm67" Jan 22 14:28:00 crc kubenswrapper[5017]: I0122 14:28:00.523594 5017 scope.go:117] "RemoveContainer" containerID="6e4212bd4a5323a21d2235cd8ecc0b767255316dd49b953bf028f4f33fdb3288" Jan 22 14:28:00 crc kubenswrapper[5017]: I0122 14:28:00.540702 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cgm67"] Jan 22 14:28:00 crc kubenswrapper[5017]: I0122 14:28:00.548180 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cgm67"] Jan 22 14:28:00 crc kubenswrapper[5017]: I0122 14:28:00.572492 5017 scope.go:117] "RemoveContainer" containerID="b3525e4ee365124195d25f9b90a0597e955c8a099a30d1117153bc74fa486922" Jan 22 14:28:00 crc kubenswrapper[5017]: I0122 14:28:00.591256 5017 scope.go:117] "RemoveContainer" containerID="01f369eee28667126f0a60312056ae8fb21a4e3423f972c4297960e0c0852638" Jan 22 14:28:00 crc kubenswrapper[5017]: E0122 14:28:00.591860 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01f369eee28667126f0a60312056ae8fb21a4e3423f972c4297960e0c0852638\": container with ID starting with 01f369eee28667126f0a60312056ae8fb21a4e3423f972c4297960e0c0852638 not found: ID does not exist" containerID="01f369eee28667126f0a60312056ae8fb21a4e3423f972c4297960e0c0852638" Jan 22 14:28:00 crc kubenswrapper[5017]: I0122 14:28:00.591934 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01f369eee28667126f0a60312056ae8fb21a4e3423f972c4297960e0c0852638"} err="failed to get container status \"01f369eee28667126f0a60312056ae8fb21a4e3423f972c4297960e0c0852638\": rpc error: code = NotFound desc = could not find container \"01f369eee28667126f0a60312056ae8fb21a4e3423f972c4297960e0c0852638\": container with ID starting with 01f369eee28667126f0a60312056ae8fb21a4e3423f972c4297960e0c0852638 not found: ID does not exist" Jan 22 14:28:00 crc kubenswrapper[5017]: I0122 14:28:00.591977 5017 scope.go:117] "RemoveContainer" containerID="6e4212bd4a5323a21d2235cd8ecc0b767255316dd49b953bf028f4f33fdb3288" Jan 22 14:28:00 crc kubenswrapper[5017]: E0122 14:28:00.592440 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e4212bd4a5323a21d2235cd8ecc0b767255316dd49b953bf028f4f33fdb3288\": container with ID starting with 6e4212bd4a5323a21d2235cd8ecc0b767255316dd49b953bf028f4f33fdb3288 not found: ID does not exist" containerID="6e4212bd4a5323a21d2235cd8ecc0b767255316dd49b953bf028f4f33fdb3288" Jan 22 14:28:00 crc kubenswrapper[5017]: I0122 14:28:00.592468 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e4212bd4a5323a21d2235cd8ecc0b767255316dd49b953bf028f4f33fdb3288"} err="failed to get container status \"6e4212bd4a5323a21d2235cd8ecc0b767255316dd49b953bf028f4f33fdb3288\": rpc error: code = NotFound desc = could not find container \"6e4212bd4a5323a21d2235cd8ecc0b767255316dd49b953bf028f4f33fdb3288\": container with ID starting with 6e4212bd4a5323a21d2235cd8ecc0b767255316dd49b953bf028f4f33fdb3288 not found: ID does not exist" Jan 22 14:28:00 crc kubenswrapper[5017]: I0122 14:28:00.592485 5017 scope.go:117] "RemoveContainer" containerID="b3525e4ee365124195d25f9b90a0597e955c8a099a30d1117153bc74fa486922" Jan 22 14:28:00 crc kubenswrapper[5017]: E0122 14:28:00.592839 5017 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"b3525e4ee365124195d25f9b90a0597e955c8a099a30d1117153bc74fa486922\": container with ID starting with b3525e4ee365124195d25f9b90a0597e955c8a099a30d1117153bc74fa486922 not found: ID does not exist" containerID="b3525e4ee365124195d25f9b90a0597e955c8a099a30d1117153bc74fa486922" Jan 22 14:28:00 crc kubenswrapper[5017]: I0122 14:28:00.592885 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3525e4ee365124195d25f9b90a0597e955c8a099a30d1117153bc74fa486922"} err="failed to get container status \"b3525e4ee365124195d25f9b90a0597e955c8a099a30d1117153bc74fa486922\": rpc error: code = NotFound desc = could not find container \"b3525e4ee365124195d25f9b90a0597e955c8a099a30d1117153bc74fa486922\": container with ID starting with b3525e4ee365124195d25f9b90a0597e955c8a099a30d1117153bc74fa486922 not found: ID does not exist" Jan 22 14:28:01 crc kubenswrapper[5017]: I0122 14:28:01.267016 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b46ade51-e5a9-4ead-9354-4f43cdcef7f0" path="/var/lib/kubelet/pods/b46ade51-e5a9-4ead-9354-4f43cdcef7f0/volumes" Jan 22 14:28:50 crc kubenswrapper[5017]: I0122 14:28:50.108019 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xqn8j"] Jan 22 14:28:50 crc kubenswrapper[5017]: E0122 14:28:50.109049 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b46ade51-e5a9-4ead-9354-4f43cdcef7f0" containerName="registry-server" Jan 22 14:28:50 crc kubenswrapper[5017]: I0122 14:28:50.109073 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="b46ade51-e5a9-4ead-9354-4f43cdcef7f0" containerName="registry-server" Jan 22 14:28:50 crc kubenswrapper[5017]: E0122 14:28:50.109095 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b46ade51-e5a9-4ead-9354-4f43cdcef7f0" containerName="extract-content" Jan 22 14:28:50 crc kubenswrapper[5017]: I0122 14:28:50.109103 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="b46ade51-e5a9-4ead-9354-4f43cdcef7f0" containerName="extract-content" Jan 22 14:28:50 crc kubenswrapper[5017]: E0122 14:28:50.109141 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b46ade51-e5a9-4ead-9354-4f43cdcef7f0" containerName="extract-utilities" Jan 22 14:28:50 crc kubenswrapper[5017]: I0122 14:28:50.109172 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="b46ade51-e5a9-4ead-9354-4f43cdcef7f0" containerName="extract-utilities" Jan 22 14:28:50 crc kubenswrapper[5017]: I0122 14:28:50.109306 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="b46ade51-e5a9-4ead-9354-4f43cdcef7f0" containerName="registry-server" Jan 22 14:28:50 crc kubenswrapper[5017]: I0122 14:28:50.110368 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xqn8j" Jan 22 14:28:50 crc kubenswrapper[5017]: I0122 14:28:50.122541 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xqn8j"] Jan 22 14:28:50 crc kubenswrapper[5017]: I0122 14:28:50.165944 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8phb\" (UniqueName: \"kubernetes.io/projected/66dad38e-485d-4299-85bf-6840efe97dd3-kube-api-access-j8phb\") pod \"community-operators-xqn8j\" (UID: \"66dad38e-485d-4299-85bf-6840efe97dd3\") " pod="openshift-marketplace/community-operators-xqn8j" Jan 22 14:28:50 crc kubenswrapper[5017]: I0122 14:28:50.166330 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66dad38e-485d-4299-85bf-6840efe97dd3-utilities\") pod \"community-operators-xqn8j\" (UID: \"66dad38e-485d-4299-85bf-6840efe97dd3\") " pod="openshift-marketplace/community-operators-xqn8j" Jan 22 14:28:50 crc kubenswrapper[5017]: I0122 14:28:50.166481 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66dad38e-485d-4299-85bf-6840efe97dd3-catalog-content\") pod \"community-operators-xqn8j\" (UID: \"66dad38e-485d-4299-85bf-6840efe97dd3\") " pod="openshift-marketplace/community-operators-xqn8j" Jan 22 14:28:50 crc kubenswrapper[5017]: I0122 14:28:50.268689 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8phb\" (UniqueName: \"kubernetes.io/projected/66dad38e-485d-4299-85bf-6840efe97dd3-kube-api-access-j8phb\") pod \"community-operators-xqn8j\" (UID: \"66dad38e-485d-4299-85bf-6840efe97dd3\") " pod="openshift-marketplace/community-operators-xqn8j" Jan 22 14:28:50 crc kubenswrapper[5017]: I0122 14:28:50.268804 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66dad38e-485d-4299-85bf-6840efe97dd3-utilities\") pod \"community-operators-xqn8j\" (UID: \"66dad38e-485d-4299-85bf-6840efe97dd3\") " pod="openshift-marketplace/community-operators-xqn8j" Jan 22 14:28:50 crc kubenswrapper[5017]: I0122 14:28:50.268841 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66dad38e-485d-4299-85bf-6840efe97dd3-catalog-content\") pod \"community-operators-xqn8j\" (UID: \"66dad38e-485d-4299-85bf-6840efe97dd3\") " pod="openshift-marketplace/community-operators-xqn8j" Jan 22 14:28:50 crc kubenswrapper[5017]: I0122 14:28:50.269458 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66dad38e-485d-4299-85bf-6840efe97dd3-catalog-content\") pod \"community-operators-xqn8j\" (UID: \"66dad38e-485d-4299-85bf-6840efe97dd3\") " pod="openshift-marketplace/community-operators-xqn8j" Jan 22 14:28:50 crc kubenswrapper[5017]: I0122 14:28:50.269922 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66dad38e-485d-4299-85bf-6840efe97dd3-utilities\") pod \"community-operators-xqn8j\" (UID: \"66dad38e-485d-4299-85bf-6840efe97dd3\") " pod="openshift-marketplace/community-operators-xqn8j" Jan 22 14:28:50 crc kubenswrapper[5017]: I0122 14:28:50.305179 5017 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-j8phb\" (UniqueName: \"kubernetes.io/projected/66dad38e-485d-4299-85bf-6840efe97dd3-kube-api-access-j8phb\") pod \"community-operators-xqn8j\" (UID: \"66dad38e-485d-4299-85bf-6840efe97dd3\") " pod="openshift-marketplace/community-operators-xqn8j" Jan 22 14:28:50 crc kubenswrapper[5017]: I0122 14:28:50.428164 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xqn8j" Jan 22 14:28:50 crc kubenswrapper[5017]: I0122 14:28:50.815705 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xqn8j"] Jan 22 14:28:50 crc kubenswrapper[5017]: I0122 14:28:50.920296 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqn8j" event={"ID":"66dad38e-485d-4299-85bf-6840efe97dd3","Type":"ContainerStarted","Data":"674124acee0a8251a1f5a016d0a6063e77331066eaeaeb228caf515a612e387d"} Jan 22 14:28:51 crc kubenswrapper[5017]: I0122 14:28:51.931348 5017 generic.go:334] "Generic (PLEG): container finished" podID="66dad38e-485d-4299-85bf-6840efe97dd3" containerID="d8d1db17acc49bf3b74cd13bd4cc59dd5505c8daff4136cb5cf13c461ff879ad" exitCode=0 Jan 22 14:28:51 crc kubenswrapper[5017]: I0122 14:28:51.931466 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqn8j" event={"ID":"66dad38e-485d-4299-85bf-6840efe97dd3","Type":"ContainerDied","Data":"d8d1db17acc49bf3b74cd13bd4cc59dd5505c8daff4136cb5cf13c461ff879ad"} Jan 22 14:28:53 crc kubenswrapper[5017]: I0122 14:28:53.950523 5017 generic.go:334] "Generic (PLEG): container finished" podID="66dad38e-485d-4299-85bf-6840efe97dd3" containerID="de8219093673c6c572414df4959dfdf61de49449ac9602d2ead73fb8afc262f1" exitCode=0 Jan 22 14:28:53 crc kubenswrapper[5017]: I0122 14:28:53.950654 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqn8j" event={"ID":"66dad38e-485d-4299-85bf-6840efe97dd3","Type":"ContainerDied","Data":"de8219093673c6c572414df4959dfdf61de49449ac9602d2ead73fb8afc262f1"} Jan 22 14:28:54 crc kubenswrapper[5017]: I0122 14:28:54.961283 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqn8j" event={"ID":"66dad38e-485d-4299-85bf-6840efe97dd3","Type":"ContainerStarted","Data":"b4060e69886436e97432f9b2007cf8ab5a01fc28296eaf39edbf42ac3228bc84"} Jan 22 14:28:54 crc kubenswrapper[5017]: I0122 14:28:54.986654 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xqn8j" podStartSLOduration=2.576262883 podStartE2EDuration="4.986625823s" podCreationTimestamp="2026-01-22 14:28:50 +0000 UTC" firstStartedPulling="2026-01-22 14:28:51.935659189 +0000 UTC m=+1546.928121047" lastFinishedPulling="2026-01-22 14:28:54.346022129 +0000 UTC m=+1549.338483987" observedRunningTime="2026-01-22 14:28:54.981087035 +0000 UTC m=+1549.973548913" watchObservedRunningTime="2026-01-22 14:28:54.986625823 +0000 UTC m=+1549.979087681" Jan 22 14:28:57 crc kubenswrapper[5017]: I0122 14:28:57.368544 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qcmrx"] Jan 22 14:28:57 crc kubenswrapper[5017]: I0122 14:28:57.373443 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qcmrx" Jan 22 14:28:57 crc kubenswrapper[5017]: I0122 14:28:57.396396 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qcmrx"] Jan 22 14:28:57 crc kubenswrapper[5017]: I0122 14:28:57.514396 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4d618f2-fbec-4f61-9458-c13ace1c1ed2-utilities\") pod \"redhat-marketplace-qcmrx\" (UID: \"e4d618f2-fbec-4f61-9458-c13ace1c1ed2\") " pod="openshift-marketplace/redhat-marketplace-qcmrx" Jan 22 14:28:57 crc kubenswrapper[5017]: I0122 14:28:57.515055 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l89vm\" (UniqueName: \"kubernetes.io/projected/e4d618f2-fbec-4f61-9458-c13ace1c1ed2-kube-api-access-l89vm\") pod \"redhat-marketplace-qcmrx\" (UID: \"e4d618f2-fbec-4f61-9458-c13ace1c1ed2\") " pod="openshift-marketplace/redhat-marketplace-qcmrx" Jan 22 14:28:57 crc kubenswrapper[5017]: I0122 14:28:57.515281 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4d618f2-fbec-4f61-9458-c13ace1c1ed2-catalog-content\") pod \"redhat-marketplace-qcmrx\" (UID: \"e4d618f2-fbec-4f61-9458-c13ace1c1ed2\") " pod="openshift-marketplace/redhat-marketplace-qcmrx" Jan 22 14:28:57 crc kubenswrapper[5017]: I0122 14:28:57.616916 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4d618f2-fbec-4f61-9458-c13ace1c1ed2-catalog-content\") pod \"redhat-marketplace-qcmrx\" (UID: \"e4d618f2-fbec-4f61-9458-c13ace1c1ed2\") " pod="openshift-marketplace/redhat-marketplace-qcmrx" Jan 22 14:28:57 crc kubenswrapper[5017]: I0122 14:28:57.617019 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4d618f2-fbec-4f61-9458-c13ace1c1ed2-utilities\") pod \"redhat-marketplace-qcmrx\" (UID: \"e4d618f2-fbec-4f61-9458-c13ace1c1ed2\") " pod="openshift-marketplace/redhat-marketplace-qcmrx" Jan 22 14:28:57 crc kubenswrapper[5017]: I0122 14:28:57.617046 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l89vm\" (UniqueName: \"kubernetes.io/projected/e4d618f2-fbec-4f61-9458-c13ace1c1ed2-kube-api-access-l89vm\") pod \"redhat-marketplace-qcmrx\" (UID: \"e4d618f2-fbec-4f61-9458-c13ace1c1ed2\") " pod="openshift-marketplace/redhat-marketplace-qcmrx" Jan 22 14:28:57 crc kubenswrapper[5017]: I0122 14:28:57.617689 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4d618f2-fbec-4f61-9458-c13ace1c1ed2-catalog-content\") pod \"redhat-marketplace-qcmrx\" (UID: \"e4d618f2-fbec-4f61-9458-c13ace1c1ed2\") " pod="openshift-marketplace/redhat-marketplace-qcmrx" Jan 22 14:28:57 crc kubenswrapper[5017]: I0122 14:28:57.618159 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4d618f2-fbec-4f61-9458-c13ace1c1ed2-utilities\") pod \"redhat-marketplace-qcmrx\" (UID: \"e4d618f2-fbec-4f61-9458-c13ace1c1ed2\") " pod="openshift-marketplace/redhat-marketplace-qcmrx" Jan 22 14:28:57 crc kubenswrapper[5017]: I0122 14:28:57.648760 5017 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-l89vm\" (UniqueName: \"kubernetes.io/projected/e4d618f2-fbec-4f61-9458-c13ace1c1ed2-kube-api-access-l89vm\") pod \"redhat-marketplace-qcmrx\" (UID: \"e4d618f2-fbec-4f61-9458-c13ace1c1ed2\") " pod="openshift-marketplace/redhat-marketplace-qcmrx" Jan 22 14:28:57 crc kubenswrapper[5017]: I0122 14:28:57.696685 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qcmrx" Jan 22 14:28:58 crc kubenswrapper[5017]: I0122 14:28:58.169153 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qcmrx"] Jan 22 14:28:58 crc kubenswrapper[5017]: I0122 14:28:58.991480 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qcmrx" event={"ID":"e4d618f2-fbec-4f61-9458-c13ace1c1ed2","Type":"ContainerStarted","Data":"45b3c2235caa4fd3aa8ed67304d4cf94ec3b739a70a62290d859ccd0fd70ac1c"} Jan 22 14:29:00 crc kubenswrapper[5017]: I0122 14:29:00.001509 5017 generic.go:334] "Generic (PLEG): container finished" podID="e4d618f2-fbec-4f61-9458-c13ace1c1ed2" containerID="ac9e629cd49c8585030016e0dcc3aa6b713c931673884d13c122af56348451e8" exitCode=0 Jan 22 14:29:00 crc kubenswrapper[5017]: I0122 14:29:00.001638 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qcmrx" event={"ID":"e4d618f2-fbec-4f61-9458-c13ace1c1ed2","Type":"ContainerDied","Data":"ac9e629cd49c8585030016e0dcc3aa6b713c931673884d13c122af56348451e8"} Jan 22 14:29:00 crc kubenswrapper[5017]: I0122 14:29:00.428700 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xqn8j" Jan 22 14:29:00 crc kubenswrapper[5017]: I0122 14:29:00.428781 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xqn8j" Jan 22 14:29:00 crc kubenswrapper[5017]: I0122 14:29:00.472231 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xqn8j" Jan 22 14:29:01 crc kubenswrapper[5017]: I0122 14:29:01.011871 5017 generic.go:334] "Generic (PLEG): container finished" podID="e4d618f2-fbec-4f61-9458-c13ace1c1ed2" containerID="391d0563bd6494e9d61092f59ee2fe19f43345404c3fc10a38ec7cd9b615e130" exitCode=0 Jan 22 14:29:01 crc kubenswrapper[5017]: I0122 14:29:01.011944 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qcmrx" event={"ID":"e4d618f2-fbec-4f61-9458-c13ace1c1ed2","Type":"ContainerDied","Data":"391d0563bd6494e9d61092f59ee2fe19f43345404c3fc10a38ec7cd9b615e130"} Jan 22 14:29:01 crc kubenswrapper[5017]: I0122 14:29:01.058739 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xqn8j" Jan 22 14:29:02 crc kubenswrapper[5017]: I0122 14:29:02.024603 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qcmrx" event={"ID":"e4d618f2-fbec-4f61-9458-c13ace1c1ed2","Type":"ContainerStarted","Data":"03dd3591abc19a2998948a23e6d072c080a8ac01600ddbb2bdb14e009d0b0934"} Jan 22 14:29:02 crc kubenswrapper[5017]: I0122 14:29:02.320803 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xqn8j"] Jan 22 14:29:03 crc kubenswrapper[5017]: I0122 14:29:03.031018 5017 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/community-operators-xqn8j" podUID="66dad38e-485d-4299-85bf-6840efe97dd3" containerName="registry-server" containerID="cri-o://b4060e69886436e97432f9b2007cf8ab5a01fc28296eaf39edbf42ac3228bc84" gracePeriod=2 Jan 22 14:29:03 crc kubenswrapper[5017]: I0122 14:29:03.060053 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qcmrx" podStartSLOduration=4.345555029 podStartE2EDuration="6.060026944s" podCreationTimestamp="2026-01-22 14:28:57 +0000 UTC" firstStartedPulling="2026-01-22 14:29:00.005687974 +0000 UTC m=+1554.998149862" lastFinishedPulling="2026-01-22 14:29:01.720159919 +0000 UTC m=+1556.712621777" observedRunningTime="2026-01-22 14:29:03.055297679 +0000 UTC m=+1558.047759537" watchObservedRunningTime="2026-01-22 14:29:03.060026944 +0000 UTC m=+1558.052488802" Jan 22 14:29:03 crc kubenswrapper[5017]: I0122 14:29:03.430503 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xqn8j" Jan 22 14:29:03 crc kubenswrapper[5017]: I0122 14:29:03.515247 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66dad38e-485d-4299-85bf-6840efe97dd3-catalog-content\") pod \"66dad38e-485d-4299-85bf-6840efe97dd3\" (UID: \"66dad38e-485d-4299-85bf-6840efe97dd3\") " Jan 22 14:29:03 crc kubenswrapper[5017]: I0122 14:29:03.515431 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66dad38e-485d-4299-85bf-6840efe97dd3-utilities\") pod \"66dad38e-485d-4299-85bf-6840efe97dd3\" (UID: \"66dad38e-485d-4299-85bf-6840efe97dd3\") " Jan 22 14:29:03 crc kubenswrapper[5017]: I0122 14:29:03.515521 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j8phb\" (UniqueName: \"kubernetes.io/projected/66dad38e-485d-4299-85bf-6840efe97dd3-kube-api-access-j8phb\") pod \"66dad38e-485d-4299-85bf-6840efe97dd3\" (UID: \"66dad38e-485d-4299-85bf-6840efe97dd3\") " Jan 22 14:29:03 crc kubenswrapper[5017]: I0122 14:29:03.516579 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66dad38e-485d-4299-85bf-6840efe97dd3-utilities" (OuterVolumeSpecName: "utilities") pod "66dad38e-485d-4299-85bf-6840efe97dd3" (UID: "66dad38e-485d-4299-85bf-6840efe97dd3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:29:03 crc kubenswrapper[5017]: I0122 14:29:03.525373 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66dad38e-485d-4299-85bf-6840efe97dd3-kube-api-access-j8phb" (OuterVolumeSpecName: "kube-api-access-j8phb") pod "66dad38e-485d-4299-85bf-6840efe97dd3" (UID: "66dad38e-485d-4299-85bf-6840efe97dd3"). InnerVolumeSpecName "kube-api-access-j8phb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:29:03 crc kubenswrapper[5017]: I0122 14:29:03.584719 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66dad38e-485d-4299-85bf-6840efe97dd3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "66dad38e-485d-4299-85bf-6840efe97dd3" (UID: "66dad38e-485d-4299-85bf-6840efe97dd3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:29:03 crc kubenswrapper[5017]: I0122 14:29:03.618046 5017 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66dad38e-485d-4299-85bf-6840efe97dd3-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:29:03 crc kubenswrapper[5017]: I0122 14:29:03.618083 5017 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66dad38e-485d-4299-85bf-6840efe97dd3-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:29:03 crc kubenswrapper[5017]: I0122 14:29:03.618096 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j8phb\" (UniqueName: \"kubernetes.io/projected/66dad38e-485d-4299-85bf-6840efe97dd3-kube-api-access-j8phb\") on node \"crc\" DevicePath \"\"" Jan 22 14:29:04 crc kubenswrapper[5017]: I0122 14:29:04.037825 5017 generic.go:334] "Generic (PLEG): container finished" podID="66dad38e-485d-4299-85bf-6840efe97dd3" containerID="b4060e69886436e97432f9b2007cf8ab5a01fc28296eaf39edbf42ac3228bc84" exitCode=0 Jan 22 14:29:04 crc kubenswrapper[5017]: I0122 14:29:04.037897 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqn8j" event={"ID":"66dad38e-485d-4299-85bf-6840efe97dd3","Type":"ContainerDied","Data":"b4060e69886436e97432f9b2007cf8ab5a01fc28296eaf39edbf42ac3228bc84"} Jan 22 14:29:04 crc kubenswrapper[5017]: I0122 14:29:04.038038 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqn8j" event={"ID":"66dad38e-485d-4299-85bf-6840efe97dd3","Type":"ContainerDied","Data":"674124acee0a8251a1f5a016d0a6063e77331066eaeaeb228caf515a612e387d"} Jan 22 14:29:04 crc kubenswrapper[5017]: I0122 14:29:04.038071 5017 scope.go:117] "RemoveContainer" containerID="b4060e69886436e97432f9b2007cf8ab5a01fc28296eaf39edbf42ac3228bc84" Jan 22 14:29:04 crc kubenswrapper[5017]: I0122 14:29:04.037950 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xqn8j" Jan 22 14:29:04 crc kubenswrapper[5017]: I0122 14:29:04.056496 5017 scope.go:117] "RemoveContainer" containerID="de8219093673c6c572414df4959dfdf61de49449ac9602d2ead73fb8afc262f1" Jan 22 14:29:04 crc kubenswrapper[5017]: I0122 14:29:04.079706 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xqn8j"] Jan 22 14:29:04 crc kubenswrapper[5017]: I0122 14:29:04.086579 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xqn8j"] Jan 22 14:29:04 crc kubenswrapper[5017]: I0122 14:29:04.094176 5017 scope.go:117] "RemoveContainer" containerID="d8d1db17acc49bf3b74cd13bd4cc59dd5505c8daff4136cb5cf13c461ff879ad" Jan 22 14:29:04 crc kubenswrapper[5017]: I0122 14:29:04.113565 5017 scope.go:117] "RemoveContainer" containerID="b4060e69886436e97432f9b2007cf8ab5a01fc28296eaf39edbf42ac3228bc84" Jan 22 14:29:04 crc kubenswrapper[5017]: E0122 14:29:04.113962 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4060e69886436e97432f9b2007cf8ab5a01fc28296eaf39edbf42ac3228bc84\": container with ID starting with b4060e69886436e97432f9b2007cf8ab5a01fc28296eaf39edbf42ac3228bc84 not found: ID does not exist" containerID="b4060e69886436e97432f9b2007cf8ab5a01fc28296eaf39edbf42ac3228bc84" Jan 22 14:29:04 crc kubenswrapper[5017]: I0122 14:29:04.114002 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4060e69886436e97432f9b2007cf8ab5a01fc28296eaf39edbf42ac3228bc84"} err="failed to get container status \"b4060e69886436e97432f9b2007cf8ab5a01fc28296eaf39edbf42ac3228bc84\": rpc error: code = NotFound desc = could not find container \"b4060e69886436e97432f9b2007cf8ab5a01fc28296eaf39edbf42ac3228bc84\": container with ID starting with b4060e69886436e97432f9b2007cf8ab5a01fc28296eaf39edbf42ac3228bc84 not found: ID does not exist" Jan 22 14:29:04 crc kubenswrapper[5017]: I0122 14:29:04.114027 5017 scope.go:117] "RemoveContainer" containerID="de8219093673c6c572414df4959dfdf61de49449ac9602d2ead73fb8afc262f1" Jan 22 14:29:04 crc kubenswrapper[5017]: E0122 14:29:04.114336 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de8219093673c6c572414df4959dfdf61de49449ac9602d2ead73fb8afc262f1\": container with ID starting with de8219093673c6c572414df4959dfdf61de49449ac9602d2ead73fb8afc262f1 not found: ID does not exist" containerID="de8219093673c6c572414df4959dfdf61de49449ac9602d2ead73fb8afc262f1" Jan 22 14:29:04 crc kubenswrapper[5017]: I0122 14:29:04.114357 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de8219093673c6c572414df4959dfdf61de49449ac9602d2ead73fb8afc262f1"} err="failed to get container status \"de8219093673c6c572414df4959dfdf61de49449ac9602d2ead73fb8afc262f1\": rpc error: code = NotFound desc = could not find container \"de8219093673c6c572414df4959dfdf61de49449ac9602d2ead73fb8afc262f1\": container with ID starting with de8219093673c6c572414df4959dfdf61de49449ac9602d2ead73fb8afc262f1 not found: ID does not exist" Jan 22 14:29:04 crc kubenswrapper[5017]: I0122 14:29:04.114372 5017 scope.go:117] "RemoveContainer" containerID="d8d1db17acc49bf3b74cd13bd4cc59dd5505c8daff4136cb5cf13c461ff879ad" Jan 22 14:29:04 crc kubenswrapper[5017]: E0122 14:29:04.114595 5017 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d8d1db17acc49bf3b74cd13bd4cc59dd5505c8daff4136cb5cf13c461ff879ad\": container with ID starting with d8d1db17acc49bf3b74cd13bd4cc59dd5505c8daff4136cb5cf13c461ff879ad not found: ID does not exist" containerID="d8d1db17acc49bf3b74cd13bd4cc59dd5505c8daff4136cb5cf13c461ff879ad" Jan 22 14:29:04 crc kubenswrapper[5017]: I0122 14:29:04.114615 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8d1db17acc49bf3b74cd13bd4cc59dd5505c8daff4136cb5cf13c461ff879ad"} err="failed to get container status \"d8d1db17acc49bf3b74cd13bd4cc59dd5505c8daff4136cb5cf13c461ff879ad\": rpc error: code = NotFound desc = could not find container \"d8d1db17acc49bf3b74cd13bd4cc59dd5505c8daff4136cb5cf13c461ff879ad\": container with ID starting with d8d1db17acc49bf3b74cd13bd4cc59dd5505c8daff4136cb5cf13c461ff879ad not found: ID does not exist" Jan 22 14:29:05 crc kubenswrapper[5017]: I0122 14:29:05.271671 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66dad38e-485d-4299-85bf-6840efe97dd3" path="/var/lib/kubelet/pods/66dad38e-485d-4299-85bf-6840efe97dd3/volumes" Jan 22 14:29:06 crc kubenswrapper[5017]: I0122 14:29:06.942949 5017 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vtnm9"] Jan 22 14:29:06 crc kubenswrapper[5017]: E0122 14:29:06.943360 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66dad38e-485d-4299-85bf-6840efe97dd3" containerName="extract-utilities" Jan 22 14:29:06 crc kubenswrapper[5017]: I0122 14:29:06.943376 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="66dad38e-485d-4299-85bf-6840efe97dd3" containerName="extract-utilities" Jan 22 14:29:06 crc kubenswrapper[5017]: E0122 14:29:06.943393 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66dad38e-485d-4299-85bf-6840efe97dd3" containerName="registry-server" Jan 22 14:29:06 crc kubenswrapper[5017]: I0122 14:29:06.943401 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="66dad38e-485d-4299-85bf-6840efe97dd3" containerName="registry-server" Jan 22 14:29:06 crc kubenswrapper[5017]: E0122 14:29:06.943417 5017 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66dad38e-485d-4299-85bf-6840efe97dd3" containerName="extract-content" Jan 22 14:29:06 crc kubenswrapper[5017]: I0122 14:29:06.943426 5017 state_mem.go:107] "Deleted CPUSet assignment" podUID="66dad38e-485d-4299-85bf-6840efe97dd3" containerName="extract-content" Jan 22 14:29:06 crc kubenswrapper[5017]: I0122 14:29:06.943578 5017 memory_manager.go:354] "RemoveStaleState removing state" podUID="66dad38e-485d-4299-85bf-6840efe97dd3" containerName="registry-server" Jan 22 14:29:06 crc kubenswrapper[5017]: I0122 14:29:06.944857 5017 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vtnm9" Jan 22 14:29:06 crc kubenswrapper[5017]: I0122 14:29:06.956011 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vtnm9"] Jan 22 14:29:07 crc kubenswrapper[5017]: I0122 14:29:07.077142 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccf7171c-730e-4d26-8928-f428a3cac003-catalog-content\") pod \"certified-operators-vtnm9\" (UID: \"ccf7171c-730e-4d26-8928-f428a3cac003\") " pod="openshift-marketplace/certified-operators-vtnm9" Jan 22 14:29:07 crc kubenswrapper[5017]: I0122 14:29:07.077239 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxcmh\" (UniqueName: \"kubernetes.io/projected/ccf7171c-730e-4d26-8928-f428a3cac003-kube-api-access-hxcmh\") pod \"certified-operators-vtnm9\" (UID: \"ccf7171c-730e-4d26-8928-f428a3cac003\") " pod="openshift-marketplace/certified-operators-vtnm9" Jan 22 14:29:07 crc kubenswrapper[5017]: I0122 14:29:07.077388 5017 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccf7171c-730e-4d26-8928-f428a3cac003-utilities\") pod \"certified-operators-vtnm9\" (UID: \"ccf7171c-730e-4d26-8928-f428a3cac003\") " pod="openshift-marketplace/certified-operators-vtnm9" Jan 22 14:29:07 crc kubenswrapper[5017]: I0122 14:29:07.179609 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxcmh\" (UniqueName: \"kubernetes.io/projected/ccf7171c-730e-4d26-8928-f428a3cac003-kube-api-access-hxcmh\") pod \"certified-operators-vtnm9\" (UID: \"ccf7171c-730e-4d26-8928-f428a3cac003\") " pod="openshift-marketplace/certified-operators-vtnm9" Jan 22 14:29:07 crc kubenswrapper[5017]: I0122 14:29:07.180164 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccf7171c-730e-4d26-8928-f428a3cac003-utilities\") pod \"certified-operators-vtnm9\" (UID: \"ccf7171c-730e-4d26-8928-f428a3cac003\") " pod="openshift-marketplace/certified-operators-vtnm9" Jan 22 14:29:07 crc kubenswrapper[5017]: I0122 14:29:07.180246 5017 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccf7171c-730e-4d26-8928-f428a3cac003-catalog-content\") pod \"certified-operators-vtnm9\" (UID: \"ccf7171c-730e-4d26-8928-f428a3cac003\") " pod="openshift-marketplace/certified-operators-vtnm9" Jan 22 14:29:07 crc kubenswrapper[5017]: I0122 14:29:07.180958 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccf7171c-730e-4d26-8928-f428a3cac003-catalog-content\") pod \"certified-operators-vtnm9\" (UID: \"ccf7171c-730e-4d26-8928-f428a3cac003\") " pod="openshift-marketplace/certified-operators-vtnm9" Jan 22 14:29:07 crc kubenswrapper[5017]: I0122 14:29:07.181075 5017 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccf7171c-730e-4d26-8928-f428a3cac003-utilities\") pod \"certified-operators-vtnm9\" (UID: \"ccf7171c-730e-4d26-8928-f428a3cac003\") " pod="openshift-marketplace/certified-operators-vtnm9" Jan 22 14:29:07 crc kubenswrapper[5017]: I0122 14:29:07.202545 5017 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hxcmh\" (UniqueName: \"kubernetes.io/projected/ccf7171c-730e-4d26-8928-f428a3cac003-kube-api-access-hxcmh\") pod \"certified-operators-vtnm9\" (UID: \"ccf7171c-730e-4d26-8928-f428a3cac003\") " pod="openshift-marketplace/certified-operators-vtnm9" Jan 22 14:29:07 crc kubenswrapper[5017]: I0122 14:29:07.271623 5017 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vtnm9" Jan 22 14:29:07 crc kubenswrapper[5017]: I0122 14:29:07.594797 5017 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vtnm9"] Jan 22 14:29:07 crc kubenswrapper[5017]: I0122 14:29:07.696938 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qcmrx" Jan 22 14:29:07 crc kubenswrapper[5017]: I0122 14:29:07.697026 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qcmrx" Jan 22 14:29:07 crc kubenswrapper[5017]: I0122 14:29:07.770903 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qcmrx" Jan 22 14:29:08 crc kubenswrapper[5017]: I0122 14:29:08.067974 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vtnm9" event={"ID":"ccf7171c-730e-4d26-8928-f428a3cac003","Type":"ContainerStarted","Data":"72246ff01e1850df130f855e6aea3721415de1065e6021add3f0eccc3dfc1302"} Jan 22 14:29:08 crc kubenswrapper[5017]: I0122 14:29:08.110740 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qcmrx" Jan 22 14:29:09 crc kubenswrapper[5017]: I0122 14:29:09.080058 5017 generic.go:334] "Generic (PLEG): container finished" podID="ccf7171c-730e-4d26-8928-f428a3cac003" containerID="1cd099872f1b6f880524afc87ecdbd8e2b6f2f1006b0d912a4d7747b7f7c262d" exitCode=0 Jan 22 14:29:09 crc kubenswrapper[5017]: I0122 14:29:09.080245 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vtnm9" event={"ID":"ccf7171c-730e-4d26-8928-f428a3cac003","Type":"ContainerDied","Data":"1cd099872f1b6f880524afc87ecdbd8e2b6f2f1006b0d912a4d7747b7f7c262d"} Jan 22 14:29:10 crc kubenswrapper[5017]: I0122 14:29:10.120173 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qcmrx"] Jan 22 14:29:10 crc kubenswrapper[5017]: I0122 14:29:10.120852 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qcmrx" podUID="e4d618f2-fbec-4f61-9458-c13ace1c1ed2" containerName="registry-server" containerID="cri-o://03dd3591abc19a2998948a23e6d072c080a8ac01600ddbb2bdb14e009d0b0934" gracePeriod=2 Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.053038 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qcmrx" Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.095640 5017 generic.go:334] "Generic (PLEG): container finished" podID="ccf7171c-730e-4d26-8928-f428a3cac003" containerID="a4aa9dcd5933464abffbf2249f1fbf3bae378d9f224fdacc2d99570d64954841" exitCode=0 Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.095719 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vtnm9" event={"ID":"ccf7171c-730e-4d26-8928-f428a3cac003","Type":"ContainerDied","Data":"a4aa9dcd5933464abffbf2249f1fbf3bae378d9f224fdacc2d99570d64954841"} Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.103946 5017 generic.go:334] "Generic (PLEG): container finished" podID="e4d618f2-fbec-4f61-9458-c13ace1c1ed2" containerID="03dd3591abc19a2998948a23e6d072c080a8ac01600ddbb2bdb14e009d0b0934" exitCode=0 Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.104010 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qcmrx" event={"ID":"e4d618f2-fbec-4f61-9458-c13ace1c1ed2","Type":"ContainerDied","Data":"03dd3591abc19a2998948a23e6d072c080a8ac01600ddbb2bdb14e009d0b0934"} Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.104071 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qcmrx" event={"ID":"e4d618f2-fbec-4f61-9458-c13ace1c1ed2","Type":"ContainerDied","Data":"45b3c2235caa4fd3aa8ed67304d4cf94ec3b739a70a62290d859ccd0fd70ac1c"} Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.104097 5017 scope.go:117] "RemoveContainer" containerID="03dd3591abc19a2998948a23e6d072c080a8ac01600ddbb2bdb14e009d0b0934" Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.104132 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qcmrx" Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.130032 5017 scope.go:117] "RemoveContainer" containerID="391d0563bd6494e9d61092f59ee2fe19f43345404c3fc10a38ec7cd9b615e130" Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.150315 5017 scope.go:117] "RemoveContainer" containerID="ac9e629cd49c8585030016e0dcc3aa6b713c931673884d13c122af56348451e8" Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.160655 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l89vm\" (UniqueName: \"kubernetes.io/projected/e4d618f2-fbec-4f61-9458-c13ace1c1ed2-kube-api-access-l89vm\") pod \"e4d618f2-fbec-4f61-9458-c13ace1c1ed2\" (UID: \"e4d618f2-fbec-4f61-9458-c13ace1c1ed2\") " Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.160746 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4d618f2-fbec-4f61-9458-c13ace1c1ed2-utilities\") pod \"e4d618f2-fbec-4f61-9458-c13ace1c1ed2\" (UID: \"e4d618f2-fbec-4f61-9458-c13ace1c1ed2\") " Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.160871 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4d618f2-fbec-4f61-9458-c13ace1c1ed2-catalog-content\") pod \"e4d618f2-fbec-4f61-9458-c13ace1c1ed2\" (UID: \"e4d618f2-fbec-4f61-9458-c13ace1c1ed2\") " Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.162952 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4d618f2-fbec-4f61-9458-c13ace1c1ed2-utilities" (OuterVolumeSpecName: "utilities") pod "e4d618f2-fbec-4f61-9458-c13ace1c1ed2" (UID: "e4d618f2-fbec-4f61-9458-c13ace1c1ed2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.169398 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4d618f2-fbec-4f61-9458-c13ace1c1ed2-kube-api-access-l89vm" (OuterVolumeSpecName: "kube-api-access-l89vm") pod "e4d618f2-fbec-4f61-9458-c13ace1c1ed2" (UID: "e4d618f2-fbec-4f61-9458-c13ace1c1ed2"). InnerVolumeSpecName "kube-api-access-l89vm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.181583 5017 scope.go:117] "RemoveContainer" containerID="03dd3591abc19a2998948a23e6d072c080a8ac01600ddbb2bdb14e009d0b0934" Jan 22 14:29:11 crc kubenswrapper[5017]: E0122 14:29:11.182236 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03dd3591abc19a2998948a23e6d072c080a8ac01600ddbb2bdb14e009d0b0934\": container with ID starting with 03dd3591abc19a2998948a23e6d072c080a8ac01600ddbb2bdb14e009d0b0934 not found: ID does not exist" containerID="03dd3591abc19a2998948a23e6d072c080a8ac01600ddbb2bdb14e009d0b0934" Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.182286 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03dd3591abc19a2998948a23e6d072c080a8ac01600ddbb2bdb14e009d0b0934"} err="failed to get container status \"03dd3591abc19a2998948a23e6d072c080a8ac01600ddbb2bdb14e009d0b0934\": rpc error: code = NotFound desc = could not find container \"03dd3591abc19a2998948a23e6d072c080a8ac01600ddbb2bdb14e009d0b0934\": container with ID starting with 03dd3591abc19a2998948a23e6d072c080a8ac01600ddbb2bdb14e009d0b0934 not found: ID does not exist" Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.182318 5017 scope.go:117] "RemoveContainer" containerID="391d0563bd6494e9d61092f59ee2fe19f43345404c3fc10a38ec7cd9b615e130" Jan 22 14:29:11 crc kubenswrapper[5017]: E0122 14:29:11.182574 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"391d0563bd6494e9d61092f59ee2fe19f43345404c3fc10a38ec7cd9b615e130\": container with ID starting with 391d0563bd6494e9d61092f59ee2fe19f43345404c3fc10a38ec7cd9b615e130 not found: ID does not exist" containerID="391d0563bd6494e9d61092f59ee2fe19f43345404c3fc10a38ec7cd9b615e130" Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.182603 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"391d0563bd6494e9d61092f59ee2fe19f43345404c3fc10a38ec7cd9b615e130"} err="failed to get container status \"391d0563bd6494e9d61092f59ee2fe19f43345404c3fc10a38ec7cd9b615e130\": rpc error: code = NotFound desc = could not find container \"391d0563bd6494e9d61092f59ee2fe19f43345404c3fc10a38ec7cd9b615e130\": container with ID starting with 391d0563bd6494e9d61092f59ee2fe19f43345404c3fc10a38ec7cd9b615e130 not found: ID does not exist" Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.182620 5017 scope.go:117] "RemoveContainer" containerID="ac9e629cd49c8585030016e0dcc3aa6b713c931673884d13c122af56348451e8" Jan 22 14:29:11 crc kubenswrapper[5017]: E0122 14:29:11.182846 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac9e629cd49c8585030016e0dcc3aa6b713c931673884d13c122af56348451e8\": container with ID starting with ac9e629cd49c8585030016e0dcc3aa6b713c931673884d13c122af56348451e8 not found: ID does not exist" containerID="ac9e629cd49c8585030016e0dcc3aa6b713c931673884d13c122af56348451e8" Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.182877 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac9e629cd49c8585030016e0dcc3aa6b713c931673884d13c122af56348451e8"} err="failed to get container status \"ac9e629cd49c8585030016e0dcc3aa6b713c931673884d13c122af56348451e8\": rpc error: code = NotFound desc = could not 
find container \"ac9e629cd49c8585030016e0dcc3aa6b713c931673884d13c122af56348451e8\": container with ID starting with ac9e629cd49c8585030016e0dcc3aa6b713c931673884d13c122af56348451e8 not found: ID does not exist" Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.189811 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4d618f2-fbec-4f61-9458-c13ace1c1ed2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e4d618f2-fbec-4f61-9458-c13ace1c1ed2" (UID: "e4d618f2-fbec-4f61-9458-c13ace1c1ed2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.263361 5017 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4d618f2-fbec-4f61-9458-c13ace1c1ed2-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.263839 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l89vm\" (UniqueName: \"kubernetes.io/projected/e4d618f2-fbec-4f61-9458-c13ace1c1ed2-kube-api-access-l89vm\") on node \"crc\" DevicePath \"\"" Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.263913 5017 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4d618f2-fbec-4f61-9458-c13ace1c1ed2-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.435157 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qcmrx"] Jan 22 14:29:11 crc kubenswrapper[5017]: I0122 14:29:11.440299 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qcmrx"] Jan 22 14:29:13 crc kubenswrapper[5017]: I0122 14:29:13.122841 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vtnm9" event={"ID":"ccf7171c-730e-4d26-8928-f428a3cac003","Type":"ContainerStarted","Data":"59a5819d48bcd25258b8aae33decbd1b25c7ca49170309e2b31aff17ea9cb296"} Jan 22 14:29:13 crc kubenswrapper[5017]: I0122 14:29:13.153264 5017 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vtnm9" podStartSLOduration=4.340046008 podStartE2EDuration="7.153250532s" podCreationTimestamp="2026-01-22 14:29:06 +0000 UTC" firstStartedPulling="2026-01-22 14:29:09.082360759 +0000 UTC m=+1564.074822617" lastFinishedPulling="2026-01-22 14:29:11.895565283 +0000 UTC m=+1566.888027141" observedRunningTime="2026-01-22 14:29:13.150593166 +0000 UTC m=+1568.143055024" watchObservedRunningTime="2026-01-22 14:29:13.153250532 +0000 UTC m=+1568.145712390" Jan 22 14:29:13 crc kubenswrapper[5017]: I0122 14:29:13.269602 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4d618f2-fbec-4f61-9458-c13ace1c1ed2" path="/var/lib/kubelet/pods/e4d618f2-fbec-4f61-9458-c13ace1c1ed2/volumes" Jan 22 14:29:17 crc kubenswrapper[5017]: I0122 14:29:17.272251 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vtnm9" Jan 22 14:29:17 crc kubenswrapper[5017]: I0122 14:29:17.272736 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vtnm9" Jan 22 14:29:17 crc kubenswrapper[5017]: I0122 14:29:17.327034 5017 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-vtnm9" Jan 22 14:29:18 crc kubenswrapper[5017]: I0122 14:29:18.216211 5017 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vtnm9" Jan 22 14:29:18 crc kubenswrapper[5017]: I0122 14:29:18.270256 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vtnm9"] Jan 22 14:29:20 crc kubenswrapper[5017]: I0122 14:29:20.185177 5017 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vtnm9" podUID="ccf7171c-730e-4d26-8928-f428a3cac003" containerName="registry-server" containerID="cri-o://59a5819d48bcd25258b8aae33decbd1b25c7ca49170309e2b31aff17ea9cb296" gracePeriod=2 Jan 22 14:29:20 crc kubenswrapper[5017]: I0122 14:29:20.588343 5017 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vtnm9" Jan 22 14:29:20 crc kubenswrapper[5017]: I0122 14:29:20.624362 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccf7171c-730e-4d26-8928-f428a3cac003-utilities\") pod \"ccf7171c-730e-4d26-8928-f428a3cac003\" (UID: \"ccf7171c-730e-4d26-8928-f428a3cac003\") " Jan 22 14:29:20 crc kubenswrapper[5017]: I0122 14:29:20.624435 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxcmh\" (UniqueName: \"kubernetes.io/projected/ccf7171c-730e-4d26-8928-f428a3cac003-kube-api-access-hxcmh\") pod \"ccf7171c-730e-4d26-8928-f428a3cac003\" (UID: \"ccf7171c-730e-4d26-8928-f428a3cac003\") " Jan 22 14:29:20 crc kubenswrapper[5017]: I0122 14:29:20.624624 5017 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccf7171c-730e-4d26-8928-f428a3cac003-catalog-content\") pod \"ccf7171c-730e-4d26-8928-f428a3cac003\" (UID: \"ccf7171c-730e-4d26-8928-f428a3cac003\") " Jan 22 14:29:20 crc kubenswrapper[5017]: I0122 14:29:20.626217 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccf7171c-730e-4d26-8928-f428a3cac003-utilities" (OuterVolumeSpecName: "utilities") pod "ccf7171c-730e-4d26-8928-f428a3cac003" (UID: "ccf7171c-730e-4d26-8928-f428a3cac003"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:29:20 crc kubenswrapper[5017]: I0122 14:29:20.634450 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccf7171c-730e-4d26-8928-f428a3cac003-kube-api-access-hxcmh" (OuterVolumeSpecName: "kube-api-access-hxcmh") pod "ccf7171c-730e-4d26-8928-f428a3cac003" (UID: "ccf7171c-730e-4d26-8928-f428a3cac003"). InnerVolumeSpecName "kube-api-access-hxcmh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:29:20 crc kubenswrapper[5017]: I0122 14:29:20.676534 5017 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccf7171c-730e-4d26-8928-f428a3cac003-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ccf7171c-730e-4d26-8928-f428a3cac003" (UID: "ccf7171c-730e-4d26-8928-f428a3cac003"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:29:20 crc kubenswrapper[5017]: I0122 14:29:20.726803 5017 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccf7171c-730e-4d26-8928-f428a3cac003-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:29:20 crc kubenswrapper[5017]: I0122 14:29:20.727330 5017 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccf7171c-730e-4d26-8928-f428a3cac003-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:29:20 crc kubenswrapper[5017]: I0122 14:29:20.727431 5017 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxcmh\" (UniqueName: \"kubernetes.io/projected/ccf7171c-730e-4d26-8928-f428a3cac003-kube-api-access-hxcmh\") on node \"crc\" DevicePath \"\"" Jan 22 14:29:21 crc kubenswrapper[5017]: I0122 14:29:21.195666 5017 generic.go:334] "Generic (PLEG): container finished" podID="ccf7171c-730e-4d26-8928-f428a3cac003" containerID="59a5819d48bcd25258b8aae33decbd1b25c7ca49170309e2b31aff17ea9cb296" exitCode=0 Jan 22 14:29:21 crc kubenswrapper[5017]: I0122 14:29:21.195738 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vtnm9" event={"ID":"ccf7171c-730e-4d26-8928-f428a3cac003","Type":"ContainerDied","Data":"59a5819d48bcd25258b8aae33decbd1b25c7ca49170309e2b31aff17ea9cb296"} Jan 22 14:29:21 crc kubenswrapper[5017]: I0122 14:29:21.195778 5017 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vtnm9" event={"ID":"ccf7171c-730e-4d26-8928-f428a3cac003","Type":"ContainerDied","Data":"72246ff01e1850df130f855e6aea3721415de1065e6021add3f0eccc3dfc1302"} Jan 22 14:29:21 crc kubenswrapper[5017]: I0122 14:29:21.195801 5017 scope.go:117] "RemoveContainer" containerID="59a5819d48bcd25258b8aae33decbd1b25c7ca49170309e2b31aff17ea9cb296" Jan 22 14:29:21 crc kubenswrapper[5017]: I0122 14:29:21.195993 5017 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vtnm9" Jan 22 14:29:21 crc kubenswrapper[5017]: I0122 14:29:21.221063 5017 scope.go:117] "RemoveContainer" containerID="a4aa9dcd5933464abffbf2249f1fbf3bae378d9f224fdacc2d99570d64954841" Jan 22 14:29:21 crc kubenswrapper[5017]: I0122 14:29:21.233731 5017 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vtnm9"] Jan 22 14:29:21 crc kubenswrapper[5017]: I0122 14:29:21.239370 5017 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vtnm9"] Jan 22 14:29:21 crc kubenswrapper[5017]: I0122 14:29:21.263853 5017 scope.go:117] "RemoveContainer" containerID="1cd099872f1b6f880524afc87ecdbd8e2b6f2f1006b0d912a4d7747b7f7c262d" Jan 22 14:29:21 crc kubenswrapper[5017]: I0122 14:29:21.268570 5017 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ccf7171c-730e-4d26-8928-f428a3cac003" path="/var/lib/kubelet/pods/ccf7171c-730e-4d26-8928-f428a3cac003/volumes" Jan 22 14:29:21 crc kubenswrapper[5017]: I0122 14:29:21.287093 5017 scope.go:117] "RemoveContainer" containerID="59a5819d48bcd25258b8aae33decbd1b25c7ca49170309e2b31aff17ea9cb296" Jan 22 14:29:21 crc kubenswrapper[5017]: E0122 14:29:21.288420 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59a5819d48bcd25258b8aae33decbd1b25c7ca49170309e2b31aff17ea9cb296\": container with ID starting with 59a5819d48bcd25258b8aae33decbd1b25c7ca49170309e2b31aff17ea9cb296 not found: ID does not exist" containerID="59a5819d48bcd25258b8aae33decbd1b25c7ca49170309e2b31aff17ea9cb296" Jan 22 14:29:21 crc kubenswrapper[5017]: I0122 14:29:21.288768 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59a5819d48bcd25258b8aae33decbd1b25c7ca49170309e2b31aff17ea9cb296"} err="failed to get container status \"59a5819d48bcd25258b8aae33decbd1b25c7ca49170309e2b31aff17ea9cb296\": rpc error: code = NotFound desc = could not find container \"59a5819d48bcd25258b8aae33decbd1b25c7ca49170309e2b31aff17ea9cb296\": container with ID starting with 59a5819d48bcd25258b8aae33decbd1b25c7ca49170309e2b31aff17ea9cb296 not found: ID does not exist" Jan 22 14:29:21 crc kubenswrapper[5017]: I0122 14:29:21.288870 5017 scope.go:117] "RemoveContainer" containerID="a4aa9dcd5933464abffbf2249f1fbf3bae378d9f224fdacc2d99570d64954841" Jan 22 14:29:21 crc kubenswrapper[5017]: E0122 14:29:21.289356 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a4aa9dcd5933464abffbf2249f1fbf3bae378d9f224fdacc2d99570d64954841\": container with ID starting with a4aa9dcd5933464abffbf2249f1fbf3bae378d9f224fdacc2d99570d64954841 not found: ID does not exist" containerID="a4aa9dcd5933464abffbf2249f1fbf3bae378d9f224fdacc2d99570d64954841" Jan 22 14:29:21 crc kubenswrapper[5017]: I0122 14:29:21.289385 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4aa9dcd5933464abffbf2249f1fbf3bae378d9f224fdacc2d99570d64954841"} err="failed to get container status \"a4aa9dcd5933464abffbf2249f1fbf3bae378d9f224fdacc2d99570d64954841\": rpc error: code = NotFound desc = could not find container \"a4aa9dcd5933464abffbf2249f1fbf3bae378d9f224fdacc2d99570d64954841\": container with ID starting with a4aa9dcd5933464abffbf2249f1fbf3bae378d9f224fdacc2d99570d64954841 not found: ID does not exist" Jan 22 14:29:21 crc kubenswrapper[5017]: I0122 
14:29:21.289407 5017 scope.go:117] "RemoveContainer" containerID="1cd099872f1b6f880524afc87ecdbd8e2b6f2f1006b0d912a4d7747b7f7c262d" Jan 22 14:29:21 crc kubenswrapper[5017]: E0122 14:29:21.289670 5017 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cd099872f1b6f880524afc87ecdbd8e2b6f2f1006b0d912a4d7747b7f7c262d\": container with ID starting with 1cd099872f1b6f880524afc87ecdbd8e2b6f2f1006b0d912a4d7747b7f7c262d not found: ID does not exist" containerID="1cd099872f1b6f880524afc87ecdbd8e2b6f2f1006b0d912a4d7747b7f7c262d" Jan 22 14:29:21 crc kubenswrapper[5017]: I0122 14:29:21.289697 5017 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cd099872f1b6f880524afc87ecdbd8e2b6f2f1006b0d912a4d7747b7f7c262d"} err="failed to get container status \"1cd099872f1b6f880524afc87ecdbd8e2b6f2f1006b0d912a4d7747b7f7c262d\": rpc error: code = NotFound desc = could not find container \"1cd099872f1b6f880524afc87ecdbd8e2b6f2f1006b0d912a4d7747b7f7c262d\": container with ID starting with 1cd099872f1b6f880524afc87ecdbd8e2b6f2f1006b0d912a4d7747b7f7c262d not found: ID does not exist"